{"version":3,"file":"data1744008259795.js","sources":["../node_modules/@algolia/client-abtesting/node_modules/@algolia/requester-browser-xhr/dist/requester.xhr.js","../node_modules/@algolia/client-abtesting/node_modules/@algolia/client-common/dist/common.js","../node_modules/@algolia/client-abtesting/dist/builds/browser.js","../node_modules/@algolia/client-analytics/node_modules/@algolia/requester-browser-xhr/dist/requester.xhr.js","../node_modules/@algolia/client-analytics/node_modules/@algolia/client-common/dist/common.js","../node_modules/@algolia/client-analytics/dist/builds/browser.js","../node_modules/@algolia/client-insights/node_modules/@algolia/requester-browser-xhr/dist/requester.xhr.js","../node_modules/@algolia/client-insights/node_modules/@algolia/client-common/dist/common.js","../node_modules/@algolia/client-insights/dist/builds/browser.js","../node_modules/@algolia/client-personalization/node_modules/@algolia/requester-browser-xhr/dist/requester.xhr.js","../node_modules/@algolia/client-personalization/node_modules/@algolia/client-common/dist/common.js","../node_modules/@algolia/client-personalization/dist/builds/browser.js","../node_modules/@algolia/client-query-suggestions/node_modules/@algolia/requester-browser-xhr/dist/requester.xhr.js","../node_modules/@algolia/client-query-suggestions/node_modules/@algolia/client-common/dist/common.js","../node_modules/@algolia/client-query-suggestions/dist/builds/browser.js","../node_modules/algoliasearch/node_modules/@algolia/client-common/dist/common.js","../node_modules/algoliasearch/node_modules/@algolia/requester-browser-xhr/dist/requester.xhr.js","../node_modules/algoliasearch/node_modules/@algolia/client-search/dist/builds/browser.js","../node_modules/@algolia/ingestion/node_modules/@algolia/requester-browser-xhr/dist/requester.xhr.js","../node_modules/@algolia/ingestion/node_modules/@algolia/client-common/dist/common.js","../node_modules/@algolia/ingestion/dist/builds/browser.js","../node_modules/@algolia/monitoring/node_modules/@algolia/requester-browser-xhr/dist/requester.xhr.js","../node_modules/@algolia/monitoring/node_modules/@algolia/client-common/dist/common.js","../node_modules/@algolia/monitoring/dist/builds/browser.js","../node_modules/@algolia/recommend/node_modules/@algolia/requester-browser-xhr/dist/requester.xhr.js","../node_modules/@algolia/recommend/node_modules/@algolia/client-common/dist/common.js","../node_modules/@algolia/recommend/dist/builds/browser.js","../node_modules/algoliasearch/dist/browser.js","../node_modules/papaparse/papaparse.min.js"],"sourcesContent":["function m(){function r(t){return new Promise(s=>{let e=new XMLHttpRequest;e.open(t.method,t.url,!0),Object.keys(t.headers).forEach(n=>e.setRequestHeader(n,t.headers[n]));let i=(n,a)=>setTimeout(()=>{e.abort(),s({status:0,content:a,isTimedOut:!0})},n),u=i(t.connectTimeout,\"Connection timeout\"),o;e.onreadystatechange=()=>{e.readyState>e.OPENED&&o===void 0&&(clearTimeout(u),o=i(t.responseTimeout,\"Socket timeout\"))},e.onerror=()=>{e.status===0&&(clearTimeout(u),clearTimeout(o),s({content:e.responseText||\"Network request failed\",status:e.status,isTimedOut:!1}))},e.onload=()=>{clearTimeout(u),clearTimeout(o),s({content:e.responseText,status:e.status,isTimedOut:!1})},e.send(t.data)})}return{send:r}}export{m as createXhrRequester};\n//# sourceMappingURL=requester.xhr.js.map","// src/cache/createBrowserLocalStorageCache.ts\nfunction createBrowserLocalStorageCache(options) {\n let storage;\n const namespaceKey = `algolia-client-js-${options.key}`;\n 
function getStorage() {\n if (storage === void 0) {\n storage = options.localStorage || window.localStorage;\n }\n return storage;\n }\n function getNamespace() {\n return JSON.parse(getStorage().getItem(namespaceKey) || \"{}\");\n }\n function setNamespace(namespace) {\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n }\n function removeOutdatedCacheItems() {\n const timeToLive = options.timeToLive ? options.timeToLive * 1e3 : null;\n const namespace = getNamespace();\n const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(\n Object.entries(namespace).filter(([, cacheItem]) => {\n return cacheItem.timestamp !== void 0;\n })\n );\n setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);\n if (!timeToLive) {\n return;\n }\n const filteredNamespaceWithoutExpiredItems = Object.fromEntries(\n Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {\n const currentTimestamp = (/* @__PURE__ */ new Date()).getTime();\n const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;\n return !isExpired;\n })\n );\n setNamespace(filteredNamespaceWithoutExpiredItems);\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return Promise.resolve().then(() => {\n removeOutdatedCacheItems();\n return getNamespace()[JSON.stringify(key)];\n }).then((value) => {\n return Promise.all([value ? value.value : defaultValue(), value !== void 0]);\n }).then(([value, exists]) => {\n return Promise.all([value, exists || events.miss(value)]);\n }).then(([value]) => value);\n },\n set(key, value) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n namespace[JSON.stringify(key)] = {\n timestamp: (/* @__PURE__ */ new Date()).getTime(),\n value\n };\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n return value;\n });\n },\n delete(key) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n delete namespace[JSON.stringify(key)];\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n });\n },\n clear() {\n return Promise.resolve().then(() => {\n getStorage().removeItem(namespaceKey);\n });\n }\n };\n}\n\n// src/cache/createNullCache.ts\nfunction createNullCache() {\n return {\n get(_key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const value = defaultValue();\n return value.then((result) => Promise.all([result, events.miss(result)])).then(([result]) => result);\n },\n set(_key, value) {\n return Promise.resolve(value);\n },\n delete(_key) {\n return Promise.resolve();\n },\n clear() {\n return Promise.resolve();\n }\n };\n}\n\n// src/cache/createFallbackableCache.ts\nfunction createFallbackableCache(options) {\n const caches = [...options.caches];\n const current = caches.shift();\n if (current === void 0) {\n return createNullCache();\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return current.get(key, defaultValue, events).catch(() => {\n return createFallbackableCache({ caches }).get(key, defaultValue, events);\n });\n },\n set(key, value) {\n return current.set(key, value).catch(() => {\n return createFallbackableCache({ caches }).set(key, value);\n });\n },\n delete(key) {\n return current.delete(key).catch(() => {\n return createFallbackableCache({ caches }).delete(key);\n });\n },\n clear() {\n return current.clear().catch(() => {\n return createFallbackableCache({ caches }).clear();\n });\n }\n };\n}\n\n// 
src/cache/createMemoryCache.ts\nfunction createMemoryCache(options = { serializable: true }) {\n let cache = {};\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const keyAsString = JSON.stringify(key);\n if (keyAsString in cache) {\n return Promise.resolve(options.serializable ? JSON.parse(cache[keyAsString]) : cache[keyAsString]);\n }\n const promise = defaultValue();\n return promise.then((value) => events.miss(value)).then(() => promise);\n },\n set(key, value) {\n cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;\n return Promise.resolve(value);\n },\n delete(key) {\n delete cache[JSON.stringify(key)];\n return Promise.resolve();\n },\n clear() {\n cache = {};\n return Promise.resolve();\n }\n };\n}\n\n// src/constants.ts\nvar DEFAULT_CONNECT_TIMEOUT_BROWSER = 1e3;\nvar DEFAULT_READ_TIMEOUT_BROWSER = 2e3;\nvar DEFAULT_WRITE_TIMEOUT_BROWSER = 3e4;\nvar DEFAULT_CONNECT_TIMEOUT_NODE = 2e3;\nvar DEFAULT_READ_TIMEOUT_NODE = 5e3;\nvar DEFAULT_WRITE_TIMEOUT_NODE = 3e4;\n\n// src/createAlgoliaAgent.ts\nfunction createAlgoliaAgent(version) {\n const algoliaAgent = {\n value: `Algolia for JavaScript (${version})`,\n add(options) {\n const addedAlgoliaAgent = `; ${options.segment}${options.version !== void 0 ? ` (${options.version})` : \"\"}`;\n if (algoliaAgent.value.indexOf(addedAlgoliaAgent) === -1) {\n algoliaAgent.value = `${algoliaAgent.value}${addedAlgoliaAgent}`;\n }\n return algoliaAgent;\n }\n };\n return algoliaAgent;\n}\n\n// src/createAuth.ts\nfunction createAuth(appId, apiKey, authMode = \"WithinHeaders\") {\n const credentials = {\n \"x-algolia-api-key\": apiKey,\n \"x-algolia-application-id\": appId\n };\n return {\n headers() {\n return authMode === \"WithinHeaders\" ? credentials : {};\n },\n queryParameters() {\n return authMode === \"WithinQueryParameters\" ? 
credentials : {};\n }\n };\n}\n\n// src/createIterablePromise.ts\nfunction createIterablePromise({\n func,\n validate,\n aggregator,\n error,\n timeout = () => 0\n}) {\n const retry = (previousResponse) => {\n return new Promise((resolve, reject) => {\n func(previousResponse).then(async (response) => {\n if (aggregator) {\n await aggregator(response);\n }\n if (await validate(response)) {\n return resolve(response);\n }\n if (error && await error.validate(response)) {\n return reject(new Error(await error.message(response)));\n }\n return setTimeout(\n () => {\n retry(response).then(resolve).catch(reject);\n },\n await timeout()\n );\n }).catch((err) => {\n reject(err);\n });\n });\n };\n return retry();\n}\n\n// src/getAlgoliaAgent.ts\nfunction getAlgoliaAgent({ algoliaAgents, client, version }) {\n const defaultAlgoliaAgent = createAlgoliaAgent(version).add({\n segment: client,\n version\n });\n algoliaAgents.forEach((algoliaAgent) => defaultAlgoliaAgent.add(algoliaAgent));\n return defaultAlgoliaAgent;\n}\n\n// src/logger/createNullLogger.ts\nfunction createNullLogger() {\n return {\n debug(_message, _args) {\n return Promise.resolve();\n },\n info(_message, _args) {\n return Promise.resolve();\n },\n error(_message, _args) {\n return Promise.resolve();\n }\n };\n}\n\n// src/transporter/createStatefulHost.ts\nvar EXPIRATION_DELAY = 2 * 60 * 1e3;\nfunction createStatefulHost(host, status = \"up\") {\n const lastUpdate = Date.now();\n function isUp() {\n return status === \"up\" || Date.now() - lastUpdate > EXPIRATION_DELAY;\n }\n function isTimedOut() {\n return status === \"timed out\" && Date.now() - lastUpdate <= EXPIRATION_DELAY;\n }\n return { ...host, status, lastUpdate, isUp, isTimedOut };\n}\n\n// src/transporter/errors.ts\nvar AlgoliaError = class extends Error {\n name = \"AlgoliaError\";\n constructor(message, name) {\n super(message);\n if (name) {\n this.name = name;\n }\n }\n};\nvar ErrorWithStackTrace = class extends AlgoliaError {\n stackTrace;\n constructor(message, stackTrace, name) {\n super(message, name);\n this.stackTrace = stackTrace;\n }\n};\nvar RetryError = class extends ErrorWithStackTrace {\n constructor(stackTrace) {\n super(\n \"Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support.\",\n stackTrace,\n \"RetryError\"\n );\n }\n};\nvar ApiError = class extends ErrorWithStackTrace {\n status;\n constructor(message, status, stackTrace, name = \"ApiError\") {\n super(message, stackTrace, name);\n this.status = status;\n }\n};\nvar DeserializationError = class extends AlgoliaError {\n response;\n constructor(message, response) {\n super(message, \"DeserializationError\");\n this.response = response;\n }\n};\nvar DetailedApiError = class extends ApiError {\n error;\n constructor(message, status, error, stackTrace) {\n super(message, status, stackTrace, \"DetailedApiError\");\n this.error = error;\n }\n};\n\n// src/transporter/helpers.ts\nfunction shuffle(array) {\n const shuffledArray = array;\n for (let c = array.length - 1; c > 0; c--) {\n const b = Math.floor(Math.random() * (c + 1));\n const a = array[c];\n shuffledArray[c] = array[b];\n shuffledArray[b] = a;\n }\n return shuffledArray;\n}\nfunction serializeUrl(host, path, queryParameters) {\n const queryParametersAsString = serializeQueryParameters(queryParameters);\n let url = `${host.protocol}://${host.url}${host.port ? `:${host.port}` : \"\"}/${path.charAt(0) === \"/\" ? 
path.substring(1) : path}`;\n if (queryParametersAsString.length) {\n url += `?${queryParametersAsString}`;\n }\n return url;\n}\nfunction serializeQueryParameters(parameters) {\n return Object.keys(parameters).filter((key) => parameters[key] !== void 0).sort().map(\n (key) => `${key}=${encodeURIComponent(\n Object.prototype.toString.call(parameters[key]) === \"[object Array]\" ? parameters[key].join(\",\") : parameters[key]\n ).replace(/\\+/g, \"%20\")}`\n ).join(\"&\");\n}\nfunction serializeData(request, requestOptions) {\n if (request.method === \"GET\" || request.data === void 0 && requestOptions.data === void 0) {\n return void 0;\n }\n const data = Array.isArray(request.data) ? request.data : { ...request.data, ...requestOptions.data };\n return JSON.stringify(data);\n}\nfunction serializeHeaders(baseHeaders, requestHeaders, requestOptionsHeaders) {\n const headers = {\n Accept: \"application/json\",\n ...baseHeaders,\n ...requestHeaders,\n ...requestOptionsHeaders\n };\n const serializedHeaders = {};\n Object.keys(headers).forEach((header) => {\n const value = headers[header];\n serializedHeaders[header.toLowerCase()] = value;\n });\n return serializedHeaders;\n}\nfunction deserializeSuccess(response) {\n try {\n return JSON.parse(response.content);\n } catch (e) {\n throw new DeserializationError(e.message, response);\n }\n}\nfunction deserializeFailure({ content, status }, stackFrame) {\n try {\n const parsed = JSON.parse(content);\n if (\"error\" in parsed) {\n return new DetailedApiError(parsed.message, status, parsed.error, stackFrame);\n }\n return new ApiError(parsed.message, status, stackFrame);\n } catch {\n }\n return new ApiError(content, status, stackFrame);\n}\n\n// src/transporter/responses.ts\nfunction isNetworkError({ isTimedOut, status }) {\n return !isTimedOut && ~~status === 0;\n}\nfunction isRetryable({ isTimedOut, status }) {\n return isTimedOut || isNetworkError({ isTimedOut, status }) || ~~(status / 100) !== 2 && ~~(status / 100) !== 4;\n}\nfunction isSuccess({ status }) {\n return ~~(status / 100) === 2;\n}\n\n// src/transporter/stackTrace.ts\nfunction stackTraceWithoutCredentials(stackTrace) {\n return stackTrace.map((stackFrame) => stackFrameWithoutCredentials(stackFrame));\n}\nfunction stackFrameWithoutCredentials(stackFrame) {\n const modifiedHeaders = stackFrame.request.headers[\"x-algolia-api-key\"] ? { \"x-algolia-api-key\": \"*****\" } : {};\n return {\n ...stackFrame,\n request: {\n ...stackFrame.request,\n headers: {\n ...stackFrame.request.headers,\n ...modifiedHeaders\n }\n }\n };\n}\n\n// src/transporter/createTransporter.ts\nfunction createTransporter({\n hosts,\n hostsCache,\n baseHeaders,\n logger,\n baseQueryParameters,\n algoliaAgent,\n timeouts,\n requester,\n requestsCache,\n responsesCache\n}) {\n async function createRetryableOptions(compatibleHosts) {\n const statefulHosts = await Promise.all(\n compatibleHosts.map((compatibleHost) => {\n return hostsCache.get(compatibleHost, () => {\n return Promise.resolve(createStatefulHost(compatibleHost));\n });\n })\n );\n const hostsUp = statefulHosts.filter((host) => host.isUp());\n const hostsTimedOut = statefulHosts.filter((host) => host.isTimedOut());\n const hostsAvailable = [...hostsUp, ...hostsTimedOut];\n const compatibleHostsAvailable = hostsAvailable.length > 0 ? hostsAvailable : compatibleHosts;\n return {\n hosts: compatibleHostsAvailable,\n getTimeout(timeoutsCount, baseTimeout) {\n const timeoutMultiplier = hostsTimedOut.length === 0 && timeoutsCount === 0 ? 
1 : hostsTimedOut.length + 3 + timeoutsCount;\n return timeoutMultiplier * baseTimeout;\n }\n };\n }\n async function retryableRequest(request, requestOptions, isRead = true) {\n const stackTrace = [];\n const data = serializeData(request, requestOptions);\n const headers = serializeHeaders(baseHeaders, request.headers, requestOptions.headers);\n const dataQueryParameters = request.method === \"GET\" ? {\n ...request.data,\n ...requestOptions.data\n } : {};\n const queryParameters = {\n ...baseQueryParameters,\n ...request.queryParameters,\n ...dataQueryParameters\n };\n if (algoliaAgent.value) {\n queryParameters[\"x-algolia-agent\"] = algoliaAgent.value;\n }\n if (requestOptions && requestOptions.queryParameters) {\n for (const key of Object.keys(requestOptions.queryParameters)) {\n if (!requestOptions.queryParameters[key] || Object.prototype.toString.call(requestOptions.queryParameters[key]) === \"[object Object]\") {\n queryParameters[key] = requestOptions.queryParameters[key];\n } else {\n queryParameters[key] = requestOptions.queryParameters[key].toString();\n }\n }\n }\n let timeoutsCount = 0;\n const retry = async (retryableHosts, getTimeout) => {\n const host = retryableHosts.pop();\n if (host === void 0) {\n throw new RetryError(stackTraceWithoutCredentials(stackTrace));\n }\n const timeout = { ...timeouts, ...requestOptions.timeouts };\n const payload = {\n data,\n headers,\n method: request.method,\n url: serializeUrl(host, request.path, queryParameters),\n connectTimeout: getTimeout(timeoutsCount, timeout.connect),\n responseTimeout: getTimeout(timeoutsCount, isRead ? timeout.read : timeout.write)\n };\n const pushToStackTrace = (response2) => {\n const stackFrame = {\n request: payload,\n response: response2,\n host,\n triesLeft: retryableHosts.length\n };\n stackTrace.push(stackFrame);\n return stackFrame;\n };\n const response = await requester.send(payload);\n if (isRetryable(response)) {\n const stackFrame = pushToStackTrace(response);\n if (response.isTimedOut) {\n timeoutsCount++;\n }\n logger.info(\"Retryable failure\", stackFrameWithoutCredentials(stackFrame));\n await hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? \"timed out\" : \"down\"));\n return retry(retryableHosts, getTimeout);\n }\n if (isSuccess(response)) {\n return deserializeSuccess(response);\n }\n pushToStackTrace(response);\n throw deserializeFailure(response, stackTrace);\n };\n const compatibleHosts = hosts.filter(\n (host) => host.accept === \"readWrite\" || (isRead ? 
host.accept === \"read\" : host.accept === \"write\")\n );\n const options = await createRetryableOptions(compatibleHosts);\n return retry([...options.hosts].reverse(), options.getTimeout);\n }\n function createRequest(request, requestOptions = {}) {\n const isRead = request.useReadTransporter || request.method === \"GET\";\n if (!isRead) {\n return retryableRequest(request, requestOptions, isRead);\n }\n const createRetryableRequest = () => {\n return retryableRequest(request, requestOptions);\n };\n const cacheable = requestOptions.cacheable || request.cacheable;\n if (cacheable !== true) {\n return createRetryableRequest();\n }\n const key = {\n request,\n requestOptions,\n transporter: {\n queryParameters: baseQueryParameters,\n headers: baseHeaders\n }\n };\n return responsesCache.get(\n key,\n () => {\n return requestsCache.get(\n key,\n () => (\n /**\n * Finally, if there is no request in progress with the same key,\n * this `createRetryableRequest()` will actually trigger the\n * retryable request.\n */\n requestsCache.set(key, createRetryableRequest()).then(\n (response) => Promise.all([requestsCache.delete(key), response]),\n (err) => Promise.all([requestsCache.delete(key), Promise.reject(err)])\n ).then(([_, response]) => response)\n )\n );\n },\n {\n /**\n * Of course, once we get this response back from the server, we\n * tell response cache to actually store the received response\n * to be used later.\n */\n miss: (response) => responsesCache.set(key, response)\n }\n );\n }\n return {\n hostsCache,\n requester,\n timeouts,\n logger,\n algoliaAgent,\n baseHeaders,\n baseQueryParameters,\n hosts,\n request: createRequest,\n requestsCache,\n responsesCache\n };\n}\n\n// src/types/logger.ts\nvar LogLevelEnum = {\n Debug: 1,\n Info: 2,\n Error: 3\n};\nexport {\n AlgoliaError,\n ApiError,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_CONNECT_TIMEOUT_NODE,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_NODE,\n DEFAULT_WRITE_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_NODE,\n DeserializationError,\n DetailedApiError,\n ErrorWithStackTrace,\n LogLevelEnum,\n RetryError,\n createAlgoliaAgent,\n createAuth,\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createIterablePromise,\n createMemoryCache,\n createNullCache,\n createNullLogger,\n createStatefulHost,\n createTransporter,\n deserializeFailure,\n deserializeSuccess,\n getAlgoliaAgent,\n isNetworkError,\n isRetryable,\n isSuccess,\n serializeData,\n serializeHeaders,\n serializeQueryParameters,\n serializeUrl,\n shuffle,\n stackFrameWithoutCredentials,\n stackTraceWithoutCredentials\n};\n//# sourceMappingURL=common.js.map","// builds/browser.ts\nimport { createXhrRequester } from \"@algolia/requester-browser-xhr\";\nimport {\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createMemoryCache,\n createNullLogger\n} from \"@algolia/client-common\";\n\n// src/abtestingClient.ts\nimport { createAuth, createTransporter, getAlgoliaAgent } from \"@algolia/client-common\";\nvar apiClientVersion = \"5.19.0\";\nvar REGIONS = [\"de\", \"us\"];\nfunction getDefaultHosts(region) {\n const url = !region ? 
\"analytics.algolia.com\" : \"analytics.{region}.algolia.com\".replace(\"{region}\", region);\n return [{ url, accept: \"readWrite\", protocol: \"https\" }];\n}\nfunction createAbtestingClient({\n appId: appIdOption,\n apiKey: apiKeyOption,\n authMode,\n algoliaAgents,\n region: regionOption,\n ...options\n}) {\n const auth = createAuth(appIdOption, apiKeyOption, authMode);\n const transporter = createTransporter({\n hosts: getDefaultHosts(regionOption),\n ...options,\n algoliaAgent: getAlgoliaAgent({\n algoliaAgents,\n client: \"Abtesting\",\n version: apiClientVersion\n }),\n baseHeaders: {\n \"content-type\": \"text/plain\",\n ...auth.headers(),\n ...options.baseHeaders\n },\n baseQueryParameters: {\n ...auth.queryParameters(),\n ...options.baseQueryParameters\n }\n });\n return {\n transporter,\n /**\n * The `appId` currently in use.\n */\n appId: appIdOption,\n /**\n * The `apiKey` currently in use.\n */\n apiKey: apiKeyOption,\n /**\n * Clears the cache of the transporter for the `requestsCache` and `responsesCache` properties.\n */\n clearCache() {\n return Promise.all([transporter.requestsCache.clear(), transporter.responsesCache.clear()]).then(() => void 0);\n },\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return transporter.algoliaAgent.value;\n },\n /**\n * Adds a `segment` to the `x-algolia-agent` sent with every requests.\n *\n * @param segment - The algolia agent (user-agent) segment to add.\n * @param version - The version of the agent.\n */\n addAlgoliaAgent(segment, version) {\n transporter.algoliaAgent.add({ segment, version });\n },\n /**\n * Helper method to switch the API key used to authenticate the requests.\n *\n * @param params - Method params.\n * @param params.apiKey - The new API Key to use.\n */\n setClientApiKey({ apiKey }) {\n if (!authMode || authMode === \"WithinHeaders\") {\n transporter.baseHeaders[\"x-algolia-api-key\"] = apiKey;\n } else {\n transporter.baseQueryParameters[\"x-algolia-api-key\"] = apiKey;\n }\n },\n /**\n * Creates a new A/B test.\n *\n * Required API Key ACLs:\n * - editSettings\n * @param addABTestsRequest - The addABTestsRequest object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n addABTests(addABTestsRequest, requestOptions) {\n if (!addABTestsRequest) {\n throw new Error(\"Parameter `addABTestsRequest` is required when calling `addABTests`.\");\n }\n if (!addABTestsRequest.name) {\n throw new Error(\"Parameter `addABTestsRequest.name` is required when calling `addABTests`.\");\n }\n if (!addABTestsRequest.variants) {\n throw new Error(\"Parameter `addABTestsRequest.variants` is required when calling `addABTests`.\");\n }\n if (!addABTestsRequest.endAt) {\n throw new Error(\"Parameter `addABTestsRequest.endAt` is required when calling `addABTests`.\");\n }\n const requestPath = \"/2/abtests\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: addABTestsRequest\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customDelete - The customDelete object.\n * @param customDelete.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customDelete.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The 
requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customDelete({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customDelete`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customGet - The customGet object.\n * @param customGet.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customGet.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customGet({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customGet`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPost - The customPost object.\n * @param customPost.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPost.parameters - Query parameters to apply to the current query.\n * @param customPost.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPost({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPost`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPut - The customPut object.\n * @param customPut.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPut.parameters - Query parameters to apply to the current query.\n * @param customPut.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPut({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPut`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? 
body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes an A/B test by its ID.\n *\n * Required API Key ACLs:\n * - editSettings\n * @param deleteABTest - The deleteABTest object.\n * @param deleteABTest.id - Unique A/B test identifier.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteABTest({ id }, requestOptions) {\n if (!id) {\n throw new Error(\"Parameter `id` is required when calling `deleteABTest`.\");\n }\n const requestPath = \"/2/abtests/{id}\".replace(\"{id}\", encodeURIComponent(id));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Given the traffic percentage and the expected effect size, this endpoint estimates the sample size and duration of an A/B test based on historical traffic.\n *\n * Required API Key ACLs:\n * - analytics\n * @param estimateABTestRequest - The estimateABTestRequest object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n estimateABTest(estimateABTestRequest, requestOptions) {\n if (!estimateABTestRequest) {\n throw new Error(\"Parameter `estimateABTestRequest` is required when calling `estimateABTest`.\");\n }\n if (!estimateABTestRequest.configuration) {\n throw new Error(\"Parameter `estimateABTestRequest.configuration` is required when calling `estimateABTest`.\");\n }\n if (!estimateABTestRequest.variants) {\n throw new Error(\"Parameter `estimateABTestRequest.variants` is required when calling `estimateABTest`.\");\n }\n const requestPath = \"/2/abtests/estimate\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: estimateABTestRequest\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the details for an A/B test by its ID.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getABTest - The getABTest object.\n * @param getABTest.id - Unique A/B test identifier.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getABTest({ id }, requestOptions) {\n if (!id) {\n throw new Error(\"Parameter `id` is required when calling `getABTest`.\");\n }\n const requestPath = \"/2/abtests/{id}\".replace(\"{id}\", encodeURIComponent(id));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Lists all A/B tests you configured for this application.\n *\n * Required API Key ACLs:\n * - analytics\n * @param listABTests - The listABTests object.\n * @param listABTests.offset - Position of the first item to return.\n * @param listABTests.limit - Number of items to return.\n * @param listABTests.indexPrefix - Index name prefix. Only A/B tests for indices starting with this string are included in the response.\n * @param listABTests.indexSuffix - Index name suffix. 
Only A/B tests for indices ending with this string are included in the response.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listABTests({ offset, limit, indexPrefix, indexSuffix } = {}, requestOptions = void 0) {\n const requestPath = \"/2/abtests\";\n const headers = {};\n const queryParameters = {};\n if (offset !== void 0) {\n queryParameters[\"offset\"] = offset.toString();\n }\n if (limit !== void 0) {\n queryParameters[\"limit\"] = limit.toString();\n }\n if (indexPrefix !== void 0) {\n queryParameters[\"indexPrefix\"] = indexPrefix.toString();\n }\n if (indexSuffix !== void 0) {\n queryParameters[\"indexSuffix\"] = indexSuffix.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Schedule an A/B test to be started at a later time.\n *\n * Required API Key ACLs:\n * - editSettings\n * @param scheduleABTestsRequest - The scheduleABTestsRequest object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n scheduleABTest(scheduleABTestsRequest, requestOptions) {\n if (!scheduleABTestsRequest) {\n throw new Error(\"Parameter `scheduleABTestsRequest` is required when calling `scheduleABTest`.\");\n }\n if (!scheduleABTestsRequest.name) {\n throw new Error(\"Parameter `scheduleABTestsRequest.name` is required when calling `scheduleABTest`.\");\n }\n if (!scheduleABTestsRequest.variants) {\n throw new Error(\"Parameter `scheduleABTestsRequest.variants` is required when calling `scheduleABTest`.\");\n }\n if (!scheduleABTestsRequest.scheduledAt) {\n throw new Error(\"Parameter `scheduleABTestsRequest.scheduledAt` is required when calling `scheduleABTest`.\");\n }\n if (!scheduleABTestsRequest.endAt) {\n throw new Error(\"Parameter `scheduleABTestsRequest.endAt` is required when calling `scheduleABTest`.\");\n }\n const requestPath = \"/2/abtests/schedule\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: scheduleABTestsRequest\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Stops an A/B test by its ID. 
You can\\'t restart stopped A/B tests.\n *\n * Required API Key ACLs:\n * - editSettings\n * @param stopABTest - The stopABTest object.\n * @param stopABTest.id - Unique A/B test identifier.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n stopABTest({ id }, requestOptions) {\n if (!id) {\n throw new Error(\"Parameter `id` is required when calling `stopABTest`.\");\n }\n const requestPath = \"/2/abtests/{id}/stop\".replace(\"{id}\", encodeURIComponent(id));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n }\n };\n}\n\n// builds/browser.ts\nfunction abtestingClient(appId, apiKey, region, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n if (region && (typeof region !== \"string\" || !REGIONS.includes(region))) {\n throw new Error(`\\`region\\` must be one of the following: ${REGIONS.join(\", \")}`);\n }\n return createAbtestingClient({\n appId,\n apiKey,\n region,\n timeouts: {\n connect: 1e3,\n read: 2e3,\n write: 3e4\n },\n logger: createNullLogger(),\n requester: createXhrRequester(),\n algoliaAgents: [{ segment: \"Browser\" }],\n authMode: \"WithinQueryParameters\",\n responsesCache: createMemoryCache(),\n requestsCache: createMemoryCache({ serializable: false }),\n hostsCache: createFallbackableCache({\n caches: [createBrowserLocalStorageCache({ key: `${apiClientVersion}-${appId}` }), createMemoryCache()]\n }),\n ...options\n });\n}\nexport {\n abtestingClient,\n apiClientVersion\n};\n//# sourceMappingURL=browser.js.map","function m(){function r(t){return new Promise(s=>{let e=new XMLHttpRequest;e.open(t.method,t.url,!0),Object.keys(t.headers).forEach(n=>e.setRequestHeader(n,t.headers[n]));let i=(n,a)=>setTimeout(()=>{e.abort(),s({status:0,content:a,isTimedOut:!0})},n),u=i(t.connectTimeout,\"Connection timeout\"),o;e.onreadystatechange=()=>{e.readyState>e.OPENED&&o===void 0&&(clearTimeout(u),o=i(t.responseTimeout,\"Socket timeout\"))},e.onerror=()=>{e.status===0&&(clearTimeout(u),clearTimeout(o),s({content:e.responseText||\"Network request failed\",status:e.status,isTimedOut:!1}))},e.onload=()=>{clearTimeout(u),clearTimeout(o),s({content:e.responseText,status:e.status,isTimedOut:!1})},e.send(t.data)})}return{send:r}}export{m as createXhrRequester};\n//# sourceMappingURL=requester.xhr.js.map","// src/cache/createBrowserLocalStorageCache.ts\nfunction createBrowserLocalStorageCache(options) {\n let storage;\n const namespaceKey = `algolia-client-js-${options.key}`;\n function getStorage() {\n if (storage === void 0) {\n storage = options.localStorage || window.localStorage;\n }\n return storage;\n }\n function getNamespace() {\n return JSON.parse(getStorage().getItem(namespaceKey) || \"{}\");\n }\n function setNamespace(namespace) {\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n }\n function removeOutdatedCacheItems() {\n const timeToLive = options.timeToLive ? 
options.timeToLive * 1e3 : null;\n const namespace = getNamespace();\n const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(\n Object.entries(namespace).filter(([, cacheItem]) => {\n return cacheItem.timestamp !== void 0;\n })\n );\n setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);\n if (!timeToLive) {\n return;\n }\n const filteredNamespaceWithoutExpiredItems = Object.fromEntries(\n Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {\n const currentTimestamp = (/* @__PURE__ */ new Date()).getTime();\n const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;\n return !isExpired;\n })\n );\n setNamespace(filteredNamespaceWithoutExpiredItems);\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return Promise.resolve().then(() => {\n removeOutdatedCacheItems();\n return getNamespace()[JSON.stringify(key)];\n }).then((value) => {\n return Promise.all([value ? value.value : defaultValue(), value !== void 0]);\n }).then(([value, exists]) => {\n return Promise.all([value, exists || events.miss(value)]);\n }).then(([value]) => value);\n },\n set(key, value) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n namespace[JSON.stringify(key)] = {\n timestamp: (/* @__PURE__ */ new Date()).getTime(),\n value\n };\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n return value;\n });\n },\n delete(key) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n delete namespace[JSON.stringify(key)];\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n });\n },\n clear() {\n return Promise.resolve().then(() => {\n getStorage().removeItem(namespaceKey);\n });\n }\n };\n}\n\n// src/cache/createNullCache.ts\nfunction createNullCache() {\n return {\n get(_key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const value = defaultValue();\n return value.then((result) => Promise.all([result, events.miss(result)])).then(([result]) => result);\n },\n set(_key, value) {\n return Promise.resolve(value);\n },\n delete(_key) {\n return Promise.resolve();\n },\n clear() {\n return Promise.resolve();\n }\n };\n}\n\n// src/cache/createFallbackableCache.ts\nfunction createFallbackableCache(options) {\n const caches = [...options.caches];\n const current = caches.shift();\n if (current === void 0) {\n return createNullCache();\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return current.get(key, defaultValue, events).catch(() => {\n return createFallbackableCache({ caches }).get(key, defaultValue, events);\n });\n },\n set(key, value) {\n return current.set(key, value).catch(() => {\n return createFallbackableCache({ caches }).set(key, value);\n });\n },\n delete(key) {\n return current.delete(key).catch(() => {\n return createFallbackableCache({ caches }).delete(key);\n });\n },\n clear() {\n return current.clear().catch(() => {\n return createFallbackableCache({ caches }).clear();\n });\n }\n };\n}\n\n// src/cache/createMemoryCache.ts\nfunction createMemoryCache(options = { serializable: true }) {\n let cache = {};\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const keyAsString = JSON.stringify(key);\n if (keyAsString in cache) {\n return Promise.resolve(options.serializable ? 
JSON.parse(cache[keyAsString]) : cache[keyAsString]);\n }\n const promise = defaultValue();\n return promise.then((value) => events.miss(value)).then(() => promise);\n },\n set(key, value) {\n cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;\n return Promise.resolve(value);\n },\n delete(key) {\n delete cache[JSON.stringify(key)];\n return Promise.resolve();\n },\n clear() {\n cache = {};\n return Promise.resolve();\n }\n };\n}\n\n// src/constants.ts\nvar DEFAULT_CONNECT_TIMEOUT_BROWSER = 1e3;\nvar DEFAULT_READ_TIMEOUT_BROWSER = 2e3;\nvar DEFAULT_WRITE_TIMEOUT_BROWSER = 3e4;\nvar DEFAULT_CONNECT_TIMEOUT_NODE = 2e3;\nvar DEFAULT_READ_TIMEOUT_NODE = 5e3;\nvar DEFAULT_WRITE_TIMEOUT_NODE = 3e4;\n\n// src/createAlgoliaAgent.ts\nfunction createAlgoliaAgent(version) {\n const algoliaAgent = {\n value: `Algolia for JavaScript (${version})`,\n add(options) {\n const addedAlgoliaAgent = `; ${options.segment}${options.version !== void 0 ? ` (${options.version})` : \"\"}`;\n if (algoliaAgent.value.indexOf(addedAlgoliaAgent) === -1) {\n algoliaAgent.value = `${algoliaAgent.value}${addedAlgoliaAgent}`;\n }\n return algoliaAgent;\n }\n };\n return algoliaAgent;\n}\n\n// src/createAuth.ts\nfunction createAuth(appId, apiKey, authMode = \"WithinHeaders\") {\n const credentials = {\n \"x-algolia-api-key\": apiKey,\n \"x-algolia-application-id\": appId\n };\n return {\n headers() {\n return authMode === \"WithinHeaders\" ? credentials : {};\n },\n queryParameters() {\n return authMode === \"WithinQueryParameters\" ? credentials : {};\n }\n };\n}\n\n// src/createIterablePromise.ts\nfunction createIterablePromise({\n func,\n validate,\n aggregator,\n error,\n timeout = () => 0\n}) {\n const retry = (previousResponse) => {\n return new Promise((resolve, reject) => {\n func(previousResponse).then(async (response) => {\n if (aggregator) {\n await aggregator(response);\n }\n if (await validate(response)) {\n return resolve(response);\n }\n if (error && await error.validate(response)) {\n return reject(new Error(await error.message(response)));\n }\n return setTimeout(\n () => {\n retry(response).then(resolve).catch(reject);\n },\n await timeout()\n );\n }).catch((err) => {\n reject(err);\n });\n });\n };\n return retry();\n}\n\n// src/getAlgoliaAgent.ts\nfunction getAlgoliaAgent({ algoliaAgents, client, version }) {\n const defaultAlgoliaAgent = createAlgoliaAgent(version).add({\n segment: client,\n version\n });\n algoliaAgents.forEach((algoliaAgent) => defaultAlgoliaAgent.add(algoliaAgent));\n return defaultAlgoliaAgent;\n}\n\n// src/logger/createNullLogger.ts\nfunction createNullLogger() {\n return {\n debug(_message, _args) {\n return Promise.resolve();\n },\n info(_message, _args) {\n return Promise.resolve();\n },\n error(_message, _args) {\n return Promise.resolve();\n }\n };\n}\n\n// src/transporter/createStatefulHost.ts\nvar EXPIRATION_DELAY = 2 * 60 * 1e3;\nfunction createStatefulHost(host, status = \"up\") {\n const lastUpdate = Date.now();\n function isUp() {\n return status === \"up\" || Date.now() - lastUpdate > EXPIRATION_DELAY;\n }\n function isTimedOut() {\n return status === \"timed out\" && Date.now() - lastUpdate <= EXPIRATION_DELAY;\n }\n return { ...host, status, lastUpdate, isUp, isTimedOut };\n}\n\n// src/transporter/errors.ts\nvar AlgoliaError = class extends Error {\n name = \"AlgoliaError\";\n constructor(message, name) {\n super(message);\n if (name) {\n this.name = name;\n }\n }\n};\nvar ErrorWithStackTrace = class extends AlgoliaError {\n stackTrace;\n 
constructor(message, stackTrace, name) {\n super(message, name);\n this.stackTrace = stackTrace;\n }\n};\nvar RetryError = class extends ErrorWithStackTrace {\n constructor(stackTrace) {\n super(\n \"Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support.\",\n stackTrace,\n \"RetryError\"\n );\n }\n};\nvar ApiError = class extends ErrorWithStackTrace {\n status;\n constructor(message, status, stackTrace, name = \"ApiError\") {\n super(message, stackTrace, name);\n this.status = status;\n }\n};\nvar DeserializationError = class extends AlgoliaError {\n response;\n constructor(message, response) {\n super(message, \"DeserializationError\");\n this.response = response;\n }\n};\nvar DetailedApiError = class extends ApiError {\n error;\n constructor(message, status, error, stackTrace) {\n super(message, status, stackTrace, \"DetailedApiError\");\n this.error = error;\n }\n};\n\n// src/transporter/helpers.ts\nfunction shuffle(array) {\n const shuffledArray = array;\n for (let c = array.length - 1; c > 0; c--) {\n const b = Math.floor(Math.random() * (c + 1));\n const a = array[c];\n shuffledArray[c] = array[b];\n shuffledArray[b] = a;\n }\n return shuffledArray;\n}\nfunction serializeUrl(host, path, queryParameters) {\n const queryParametersAsString = serializeQueryParameters(queryParameters);\n let url = `${host.protocol}://${host.url}${host.port ? `:${host.port}` : \"\"}/${path.charAt(0) === \"/\" ? path.substring(1) : path}`;\n if (queryParametersAsString.length) {\n url += `?${queryParametersAsString}`;\n }\n return url;\n}\nfunction serializeQueryParameters(parameters) {\n return Object.keys(parameters).filter((key) => parameters[key] !== void 0).sort().map(\n (key) => `${key}=${encodeURIComponent(\n Object.prototype.toString.call(parameters[key]) === \"[object Array]\" ? parameters[key].join(\",\") : parameters[key]\n ).replace(/\\+/g, \"%20\")}`\n ).join(\"&\");\n}\nfunction serializeData(request, requestOptions) {\n if (request.method === \"GET\" || request.data === void 0 && requestOptions.data === void 0) {\n return void 0;\n }\n const data = Array.isArray(request.data) ? 
request.data : { ...request.data, ...requestOptions.data };\n return JSON.stringify(data);\n}\nfunction serializeHeaders(baseHeaders, requestHeaders, requestOptionsHeaders) {\n const headers = {\n Accept: \"application/json\",\n ...baseHeaders,\n ...requestHeaders,\n ...requestOptionsHeaders\n };\n const serializedHeaders = {};\n Object.keys(headers).forEach((header) => {\n const value = headers[header];\n serializedHeaders[header.toLowerCase()] = value;\n });\n return serializedHeaders;\n}\nfunction deserializeSuccess(response) {\n try {\n return JSON.parse(response.content);\n } catch (e) {\n throw new DeserializationError(e.message, response);\n }\n}\nfunction deserializeFailure({ content, status }, stackFrame) {\n try {\n const parsed = JSON.parse(content);\n if (\"error\" in parsed) {\n return new DetailedApiError(parsed.message, status, parsed.error, stackFrame);\n }\n return new ApiError(parsed.message, status, stackFrame);\n } catch {\n }\n return new ApiError(content, status, stackFrame);\n}\n\n// src/transporter/responses.ts\nfunction isNetworkError({ isTimedOut, status }) {\n return !isTimedOut && ~~status === 0;\n}\nfunction isRetryable({ isTimedOut, status }) {\n return isTimedOut || isNetworkError({ isTimedOut, status }) || ~~(status / 100) !== 2 && ~~(status / 100) !== 4;\n}\nfunction isSuccess({ status }) {\n return ~~(status / 100) === 2;\n}\n\n// src/transporter/stackTrace.ts\nfunction stackTraceWithoutCredentials(stackTrace) {\n return stackTrace.map((stackFrame) => stackFrameWithoutCredentials(stackFrame));\n}\nfunction stackFrameWithoutCredentials(stackFrame) {\n const modifiedHeaders = stackFrame.request.headers[\"x-algolia-api-key\"] ? { \"x-algolia-api-key\": \"*****\" } : {};\n return {\n ...stackFrame,\n request: {\n ...stackFrame.request,\n headers: {\n ...stackFrame.request.headers,\n ...modifiedHeaders\n }\n }\n };\n}\n\n// src/transporter/createTransporter.ts\nfunction createTransporter({\n hosts,\n hostsCache,\n baseHeaders,\n logger,\n baseQueryParameters,\n algoliaAgent,\n timeouts,\n requester,\n requestsCache,\n responsesCache\n}) {\n async function createRetryableOptions(compatibleHosts) {\n const statefulHosts = await Promise.all(\n compatibleHosts.map((compatibleHost) => {\n return hostsCache.get(compatibleHost, () => {\n return Promise.resolve(createStatefulHost(compatibleHost));\n });\n })\n );\n const hostsUp = statefulHosts.filter((host) => host.isUp());\n const hostsTimedOut = statefulHosts.filter((host) => host.isTimedOut());\n const hostsAvailable = [...hostsUp, ...hostsTimedOut];\n const compatibleHostsAvailable = hostsAvailable.length > 0 ? hostsAvailable : compatibleHosts;\n return {\n hosts: compatibleHostsAvailable,\n getTimeout(timeoutsCount, baseTimeout) {\n const timeoutMultiplier = hostsTimedOut.length === 0 && timeoutsCount === 0 ? 1 : hostsTimedOut.length + 3 + timeoutsCount;\n return timeoutMultiplier * baseTimeout;\n }\n };\n }\n async function retryableRequest(request, requestOptions, isRead = true) {\n const stackTrace = [];\n const data = serializeData(request, requestOptions);\n const headers = serializeHeaders(baseHeaders, request.headers, requestOptions.headers);\n const dataQueryParameters = request.method === \"GET\" ? 
{\n ...request.data,\n ...requestOptions.data\n } : {};\n const queryParameters = {\n ...baseQueryParameters,\n ...request.queryParameters,\n ...dataQueryParameters\n };\n if (algoliaAgent.value) {\n queryParameters[\"x-algolia-agent\"] = algoliaAgent.value;\n }\n if (requestOptions && requestOptions.queryParameters) {\n for (const key of Object.keys(requestOptions.queryParameters)) {\n if (!requestOptions.queryParameters[key] || Object.prototype.toString.call(requestOptions.queryParameters[key]) === \"[object Object]\") {\n queryParameters[key] = requestOptions.queryParameters[key];\n } else {\n queryParameters[key] = requestOptions.queryParameters[key].toString();\n }\n }\n }\n let timeoutsCount = 0;\n const retry = async (retryableHosts, getTimeout) => {\n const host = retryableHosts.pop();\n if (host === void 0) {\n throw new RetryError(stackTraceWithoutCredentials(stackTrace));\n }\n const timeout = { ...timeouts, ...requestOptions.timeouts };\n const payload = {\n data,\n headers,\n method: request.method,\n url: serializeUrl(host, request.path, queryParameters),\n connectTimeout: getTimeout(timeoutsCount, timeout.connect),\n responseTimeout: getTimeout(timeoutsCount, isRead ? timeout.read : timeout.write)\n };\n const pushToStackTrace = (response2) => {\n const stackFrame = {\n request: payload,\n response: response2,\n host,\n triesLeft: retryableHosts.length\n };\n stackTrace.push(stackFrame);\n return stackFrame;\n };\n const response = await requester.send(payload);\n if (isRetryable(response)) {\n const stackFrame = pushToStackTrace(response);\n if (response.isTimedOut) {\n timeoutsCount++;\n }\n logger.info(\"Retryable failure\", stackFrameWithoutCredentials(stackFrame));\n await hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? \"timed out\" : \"down\"));\n return retry(retryableHosts, getTimeout);\n }\n if (isSuccess(response)) {\n return deserializeSuccess(response);\n }\n pushToStackTrace(response);\n throw deserializeFailure(response, stackTrace);\n };\n const compatibleHosts = hosts.filter(\n (host) => host.accept === \"readWrite\" || (isRead ? 
host.accept === \"read\" : host.accept === \"write\")\n );\n const options = await createRetryableOptions(compatibleHosts);\n return retry([...options.hosts].reverse(), options.getTimeout);\n }\n function createRequest(request, requestOptions = {}) {\n const isRead = request.useReadTransporter || request.method === \"GET\";\n if (!isRead) {\n return retryableRequest(request, requestOptions, isRead);\n }\n const createRetryableRequest = () => {\n return retryableRequest(request, requestOptions);\n };\n const cacheable = requestOptions.cacheable || request.cacheable;\n if (cacheable !== true) {\n return createRetryableRequest();\n }\n const key = {\n request,\n requestOptions,\n transporter: {\n queryParameters: baseQueryParameters,\n headers: baseHeaders\n }\n };\n return responsesCache.get(\n key,\n () => {\n return requestsCache.get(\n key,\n () => (\n /**\n * Finally, if there is no request in progress with the same key,\n * this `createRetryableRequest()` will actually trigger the\n * retryable request.\n */\n requestsCache.set(key, createRetryableRequest()).then(\n (response) => Promise.all([requestsCache.delete(key), response]),\n (err) => Promise.all([requestsCache.delete(key), Promise.reject(err)])\n ).then(([_, response]) => response)\n )\n );\n },\n {\n /**\n * Of course, once we get this response back from the server, we\n * tell response cache to actually store the received response\n * to be used later.\n */\n miss: (response) => responsesCache.set(key, response)\n }\n );\n }\n return {\n hostsCache,\n requester,\n timeouts,\n logger,\n algoliaAgent,\n baseHeaders,\n baseQueryParameters,\n hosts,\n request: createRequest,\n requestsCache,\n responsesCache\n };\n}\n\n// src/types/logger.ts\nvar LogLevelEnum = {\n Debug: 1,\n Info: 2,\n Error: 3\n};\nexport {\n AlgoliaError,\n ApiError,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_CONNECT_TIMEOUT_NODE,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_NODE,\n DEFAULT_WRITE_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_NODE,\n DeserializationError,\n DetailedApiError,\n ErrorWithStackTrace,\n LogLevelEnum,\n RetryError,\n createAlgoliaAgent,\n createAuth,\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createIterablePromise,\n createMemoryCache,\n createNullCache,\n createNullLogger,\n createStatefulHost,\n createTransporter,\n deserializeFailure,\n deserializeSuccess,\n getAlgoliaAgent,\n isNetworkError,\n isRetryable,\n isSuccess,\n serializeData,\n serializeHeaders,\n serializeQueryParameters,\n serializeUrl,\n shuffle,\n stackFrameWithoutCredentials,\n stackTraceWithoutCredentials\n};\n//# sourceMappingURL=common.js.map","// builds/browser.ts\nimport { createXhrRequester } from \"@algolia/requester-browser-xhr\";\nimport {\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createMemoryCache,\n createNullLogger\n} from \"@algolia/client-common\";\n\n// src/analyticsClient.ts\nimport { createAuth, createTransporter, getAlgoliaAgent } from \"@algolia/client-common\";\nvar apiClientVersion = \"5.19.0\";\nvar REGIONS = [\"de\", \"us\"];\nfunction getDefaultHosts(region) {\n const url = !region ? 
\"analytics.algolia.com\" : \"analytics.{region}.algolia.com\".replace(\"{region}\", region);\n return [{ url, accept: \"readWrite\", protocol: \"https\" }];\n}\nfunction createAnalyticsClient({\n appId: appIdOption,\n apiKey: apiKeyOption,\n authMode,\n algoliaAgents,\n region: regionOption,\n ...options\n}) {\n const auth = createAuth(appIdOption, apiKeyOption, authMode);\n const transporter = createTransporter({\n hosts: getDefaultHosts(regionOption),\n ...options,\n algoliaAgent: getAlgoliaAgent({\n algoliaAgents,\n client: \"Analytics\",\n version: apiClientVersion\n }),\n baseHeaders: {\n \"content-type\": \"text/plain\",\n ...auth.headers(),\n ...options.baseHeaders\n },\n baseQueryParameters: {\n ...auth.queryParameters(),\n ...options.baseQueryParameters\n }\n });\n return {\n transporter,\n /**\n * The `appId` currently in use.\n */\n appId: appIdOption,\n /**\n * The `apiKey` currently in use.\n */\n apiKey: apiKeyOption,\n /**\n * Clears the cache of the transporter for the `requestsCache` and `responsesCache` properties.\n */\n clearCache() {\n return Promise.all([transporter.requestsCache.clear(), transporter.responsesCache.clear()]).then(() => void 0);\n },\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return transporter.algoliaAgent.value;\n },\n /**\n * Adds a `segment` to the `x-algolia-agent` sent with every requests.\n *\n * @param segment - The algolia agent (user-agent) segment to add.\n * @param version - The version of the agent.\n */\n addAlgoliaAgent(segment, version) {\n transporter.algoliaAgent.add({ segment, version });\n },\n /**\n * Helper method to switch the API key used to authenticate the requests.\n *\n * @param params - Method params.\n * @param params.apiKey - The new API Key to use.\n */\n setClientApiKey({ apiKey }) {\n if (!authMode || authMode === \"WithinHeaders\") {\n transporter.baseHeaders[\"x-algolia-api-key\"] = apiKey;\n } else {\n transporter.baseQueryParameters[\"x-algolia-api-key\"] = apiKey;\n }\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customDelete - The customDelete object.\n * @param customDelete.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customDelete.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customDelete({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customDelete`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? 
parameters : {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customGet - The customGet object.\n * @param customGet.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customGet.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customGet({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customGet`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPost - The customPost object.\n * @param customPost.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPost.parameters - Query parameters to apply to the current query.\n * @param customPost.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPost({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPost`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPut - The customPut object.\n * @param customPut.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPut.parameters - Query parameters to apply to the current query.\n * @param customPut.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPut({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPut`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the add-to-cart rate for all of your searches with at least one add-to-cart event, including a daily breakdown. 
By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getAddToCartRate - The getAddToCartRate object.\n * @param getAddToCartRate.index - Index name.\n * @param getAddToCartRate.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getAddToCartRate.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getAddToCartRate.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getAddToCartRate({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getAddToCartRate`.\");\n }\n const requestPath = \"/2/conversions/addToCartRate\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the average click position of your search results, including a daily breakdown. The average click position is the average of all clicked search results\\' positions. For example, if users only ever click on the first result for any search, the average click position is 1. By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getAverageClickPosition - The getAverageClickPosition object.\n * @param getAverageClickPosition.index - Index name.\n * @param getAverageClickPosition.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getAverageClickPosition.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getAverageClickPosition.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getAverageClickPosition({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getAverageClickPosition`.\");\n }\n const requestPath = \"/2/clicks/averageClickPosition\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the positions in the search results and their associated number of clicks. This lets you check how many clicks the first, second, or tenth search results receive.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getClickPositions - The getClickPositions object.\n * @param getClickPositions.index - Index name.\n * @param getClickPositions.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getClickPositions.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getClickPositions.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getClickPositions({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getClickPositions`.\");\n }\n const requestPath = \"/2/clicks/positions\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the click-through rate for all of your searches with at least one click event, including a daily breakdown By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getClickThroughRate - The getClickThroughRate object.\n * @param getClickThroughRate.index - Index name.\n * @param getClickThroughRate.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getClickThroughRate.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getClickThroughRate.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getClickThroughRate({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getClickThroughRate`.\");\n }\n const requestPath = \"/2/clicks/clickThroughRate\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the conversion rate for all of your searches with at least one conversion event, including a daily breakdown. By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getConversionRate - The getConversionRate object.\n * @param getConversionRate.index - Index name.\n * @param getConversionRate.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getConversionRate.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getConversionRate.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getConversionRate({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getConversionRate`.\");\n }\n const requestPath = \"/2/conversions/conversionRate\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the fraction of searches that didn\\'t lead to any click within a time range, including a daily breakdown. By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getNoClickRate - The getNoClickRate object.\n * @param getNoClickRate.index - Index name.\n * @param getNoClickRate.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getNoClickRate.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getNoClickRate.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getNoClickRate({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getNoClickRate`.\");\n }\n const requestPath = \"/2/searches/noClickRate\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the fraction of searches that didn\\'t return any results within a time range, including a daily breakdown. By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getNoResultsRate - The getNoResultsRate object.\n * @param getNoResultsRate.index - Index name.\n * @param getNoResultsRate.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getNoResultsRate.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getNoResultsRate.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getNoResultsRate({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getNoResultsRate`.\");\n }\n const requestPath = \"/2/searches/noResultRate\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the purchase rate for all of your searches with at least one purchase event, including a daily breakdown. By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getPurchaseRate - The getPurchaseRate object.\n * @param getPurchaseRate.index - Index name.\n * @param getPurchaseRate.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getPurchaseRate.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getPurchaseRate.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getPurchaseRate({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getPurchaseRate`.\");\n }\n const requestPath = \"/2/conversions/purchaseRate\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves revenue-related metrics, such as the total revenue or the average order value. To retrieve revenue-related metrics, sent purchase events. By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getRevenue - The getRevenue object.\n * @param getRevenue.index - Index name.\n * @param getRevenue.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getRevenue.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getRevenue.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getRevenue({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getRevenue`.\");\n }\n const requestPath = \"/2/conversions/revenue\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the number of searches within a time range, including a daily breakdown. By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getSearchesCount - The getSearchesCount object.\n * @param getSearchesCount.index - Index name.\n * @param getSearchesCount.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getSearchesCount.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getSearchesCount.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getSearchesCount({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getSearchesCount`.\");\n }\n const requestPath = \"/2/searches/count\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the most popular searches that didn\\'t lead to any clicks, from the 1,000 most frequent searches.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getSearchesNoClicks - The getSearchesNoClicks object.\n * @param getSearchesNoClicks.index - Index name.\n * @param getSearchesNoClicks.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getSearchesNoClicks.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getSearchesNoClicks.limit - Number of items to return.\n * @param getSearchesNoClicks.offset - Position of the first item to return.\n * @param getSearchesNoClicks.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getSearchesNoClicks({ index, startDate, endDate, limit, offset, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getSearchesNoClicks`.\");\n }\n const requestPath = \"/2/searches/noClicks\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (limit !== void 0) {\n queryParameters[\"limit\"] = limit.toString();\n }\n if (offset !== void 0) {\n queryParameters[\"offset\"] = offset.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the most popular searches that didn\\'t return any results.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getSearchesNoResults - The getSearchesNoResults object.\n * @param getSearchesNoResults.index - Index name.\n * @param getSearchesNoResults.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getSearchesNoResults.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getSearchesNoResults.limit - Number of items to return.\n * @param getSearchesNoResults.offset - Position of the first item to return.\n * @param getSearchesNoResults.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getSearchesNoResults({ index, startDate, endDate, limit, offset, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getSearchesNoResults`.\");\n }\n const requestPath = \"/2/searches/noResults\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (limit !== void 0) {\n queryParameters[\"limit\"] = limit.toString();\n }\n if (offset !== void 0) {\n queryParameters[\"offset\"] = offset.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the time when the Analytics data for the specified index was last updated. 
The Analytics data is updated every 5 minutes.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getStatus - The getStatus object.\n * @param getStatus.index - Index name.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getStatus({ index }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getStatus`.\");\n }\n const requestPath = \"/2/status\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the countries with the most searches to your index.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getTopCountries - The getTopCountries object.\n * @param getTopCountries.index - Index name.\n * @param getTopCountries.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopCountries.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopCountries.limit - Number of items to return.\n * @param getTopCountries.offset - Position of the first item to return.\n * @param getTopCountries.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTopCountries({ index, startDate, endDate, limit, offset, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getTopCountries`.\");\n }\n const requestPath = \"/2/countries\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (limit !== void 0) {\n queryParameters[\"limit\"] = limit.toString();\n }\n if (offset !== void 0) {\n queryParameters[\"offset\"] = offset.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the most frequently used filter attributes. These are attributes of your records that you included in the `attributesForFaceting` setting.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getTopFilterAttributes - The getTopFilterAttributes object.\n * @param getTopFilterAttributes.index - Index name.\n * @param getTopFilterAttributes.search - Search query.\n * @param getTopFilterAttributes.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopFilterAttributes.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopFilterAttributes.limit - Number of items to return.\n * @param getTopFilterAttributes.offset - Position of the first item to return.\n * @param getTopFilterAttributes.tags - Tags by which to segment the analytics. 
You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTopFilterAttributes({ index, search, startDate, endDate, limit, offset, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getTopFilterAttributes`.\");\n }\n const requestPath = \"/2/filters\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (search !== void 0) {\n queryParameters[\"search\"] = search.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (limit !== void 0) {\n queryParameters[\"limit\"] = limit.toString();\n }\n if (offset !== void 0) {\n queryParameters[\"offset\"] = offset.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the most frequent filter (facet) values for a filter attribute. These are attributes of your records that you included in the `attributesForFaceting` setting.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getTopFilterForAttribute - The getTopFilterForAttribute object.\n * @param getTopFilterForAttribute.attribute - Attribute name.\n * @param getTopFilterForAttribute.index - Index name.\n * @param getTopFilterForAttribute.search - Search query.\n * @param getTopFilterForAttribute.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopFilterForAttribute.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopFilterForAttribute.limit - Number of items to return.\n * @param getTopFilterForAttribute.offset - Position of the first item to return.\n * @param getTopFilterForAttribute.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTopFilterForAttribute({ attribute, index, search, startDate, endDate, limit, offset, tags }, requestOptions) {\n if (!attribute) {\n throw new Error(\"Parameter `attribute` is required when calling `getTopFilterForAttribute`.\");\n }\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getTopFilterForAttribute`.\");\n }\n const requestPath = \"/2/filters/{attribute}\".replace(\"{attribute}\", encodeURIComponent(attribute));\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (search !== void 0) {\n queryParameters[\"search\"] = search.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (limit !== void 0) {\n queryParameters[\"limit\"] = limit.toString();\n }\n if (offset !== void 0) {\n queryParameters[\"offset\"] = offset.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the most frequently used filters for a search that didn\\'t return any results. To get the most frequent searches without results, use the [Retrieve searches without results](#tag/search/operation/getSearchesNoResults) operation.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getTopFiltersNoResults - The getTopFiltersNoResults object.\n * @param getTopFiltersNoResults.index - Index name.\n * @param getTopFiltersNoResults.search - Search query.\n * @param getTopFiltersNoResults.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopFiltersNoResults.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopFiltersNoResults.limit - Number of items to return.\n * @param getTopFiltersNoResults.offset - Position of the first item to return.\n * @param getTopFiltersNoResults.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTopFiltersNoResults({ index, search, startDate, endDate, limit, offset, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getTopFiltersNoResults`.\");\n }\n const requestPath = \"/2/filters/noResults\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (search !== void 0) {\n queryParameters[\"search\"] = search.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (limit !== void 0) {\n queryParameters[\"limit\"] = limit.toString();\n }\n if (offset !== void 0) {\n queryParameters[\"offset\"] = offset.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the object IDs of the most frequent search results.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getTopHits - The getTopHits object.\n * @param getTopHits.index - Index name.\n * @param getTopHits.search - Search query.\n * @param getTopHits.clickAnalytics - Whether to include metrics related to click and conversion events in the response.\n * @param getTopHits.revenueAnalytics - Whether to include revenue-related metrics in the response. If true, metrics related to click and conversion events are also included in the response.\n * @param getTopHits.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopHits.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopHits.limit - Number of items to return.\n * @param getTopHits.offset - Position of the first item to return.\n * @param getTopHits.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTopHits({ index, search, clickAnalytics, revenueAnalytics, startDate, endDate, limit, offset, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getTopHits`.\");\n }\n const requestPath = \"/2/hits\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (search !== void 0) {\n queryParameters[\"search\"] = search.toString();\n }\n if (clickAnalytics !== void 0) {\n queryParameters[\"clickAnalytics\"] = clickAnalytics.toString();\n }\n if (revenueAnalytics !== void 0) {\n queryParameters[\"revenueAnalytics\"] = revenueAnalytics.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (limit !== void 0) {\n queryParameters[\"limit\"] = limit.toString();\n }\n if (offset !== void 0) {\n queryParameters[\"offset\"] = offset.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Returns the most popular search terms.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getTopSearches - The getTopSearches object.\n * @param getTopSearches.index - Index name.\n * @param getTopSearches.clickAnalytics - Whether to include metrics related to click and conversion events in the response.\n * @param getTopSearches.revenueAnalytics - Whether to include revenue-related metrics in the response. If true, metrics related to click and conversion events are also included in the response.\n * @param getTopSearches.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopSearches.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getTopSearches.orderBy - Attribute by which to order the response items. If the `clickAnalytics` parameter is false, only `searchCount` is available.\n * @param getTopSearches.direction - Sorting direction of the results: ascending or descending.\n * @param getTopSearches.limit - Number of items to return.\n * @param getTopSearches.offset - Position of the first item to return.\n * @param getTopSearches.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTopSearches({\n index,\n clickAnalytics,\n revenueAnalytics,\n startDate,\n endDate,\n orderBy,\n direction,\n limit,\n offset,\n tags\n }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getTopSearches`.\");\n }\n const requestPath = \"/2/searches\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (clickAnalytics !== void 0) {\n queryParameters[\"clickAnalytics\"] = clickAnalytics.toString();\n }\n if (revenueAnalytics !== void 0) {\n queryParameters[\"revenueAnalytics\"] = revenueAnalytics.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (orderBy !== void 0) {\n queryParameters[\"orderBy\"] = orderBy.toString();\n }\n if (direction !== void 0) {\n queryParameters[\"direction\"] = direction.toString();\n }\n if (limit !== void 0) {\n queryParameters[\"limit\"] = limit.toString();\n }\n if (offset !== void 0) {\n queryParameters[\"offset\"] = offset.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the number of unique users within a time range, including a daily breakdown. Since this endpoint returns the number of unique users, the sum of the daily values might be different from the total number. By default, Algolia distinguishes search users by their IP address, _unless_ you include a pseudonymous user identifier in your search requests with the `userToken` API parameter or `x-algolia-usertoken` request header. By default, the analyzed period includes the last eight days including the current day.\n *\n * Required API Key ACLs:\n * - analytics\n * @param getUsersCount - The getUsersCount object.\n * @param getUsersCount.index - Index name.\n * @param getUsersCount.startDate - Start date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getUsersCount.endDate - End date of the period to analyze, in `YYYY-MM-DD` format.\n * @param getUsersCount.tags - Tags by which to segment the analytics. You can combine multiple tags with `OR` and `AND`. Tags must be URL-encoded. 
For more information, see [Segment your analytics data](https://www.algolia.com/doc/guides/search-analytics/guides/segments/).\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getUsersCount({ index, startDate, endDate, tags }, requestOptions) {\n if (!index) {\n throw new Error(\"Parameter `index` is required when calling `getUsersCount`.\");\n }\n const requestPath = \"/2/users/count\";\n const headers = {};\n const queryParameters = {};\n if (index !== void 0) {\n queryParameters[\"index\"] = index.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n if (tags !== void 0) {\n queryParameters[\"tags\"] = tags.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n }\n };\n}\n\n// builds/browser.ts\nfunction analyticsClient(appId, apiKey, region, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n if (region && (typeof region !== \"string\" || !REGIONS.includes(region))) {\n throw new Error(`\\`region\\` must be one of the following: ${REGIONS.join(\", \")}`);\n }\n return createAnalyticsClient({\n appId,\n apiKey,\n region,\n timeouts: {\n connect: 1e3,\n read: 2e3,\n write: 3e4\n },\n logger: createNullLogger(),\n requester: createXhrRequester(),\n algoliaAgents: [{ segment: \"Browser\" }],\n authMode: \"WithinQueryParameters\",\n responsesCache: createMemoryCache(),\n requestsCache: createMemoryCache({ serializable: false }),\n hostsCache: createFallbackableCache({\n caches: [createBrowserLocalStorageCache({ key: `${apiClientVersion}-${appId}` }), createMemoryCache()]\n }),\n ...options\n });\n}\nexport {\n analyticsClient,\n apiClientVersion\n};\n//# sourceMappingURL=browser.js.map","function m(){function r(t){return new Promise(s=>{let e=new XMLHttpRequest;e.open(t.method,t.url,!0),Object.keys(t.headers).forEach(n=>e.setRequestHeader(n,t.headers[n]));let i=(n,a)=>setTimeout(()=>{e.abort(),s({status:0,content:a,isTimedOut:!0})},n),u=i(t.connectTimeout,\"Connection timeout\"),o;e.onreadystatechange=()=>{e.readyState>e.OPENED&&o===void 0&&(clearTimeout(u),o=i(t.responseTimeout,\"Socket timeout\"))},e.onerror=()=>{e.status===0&&(clearTimeout(u),clearTimeout(o),s({content:e.responseText||\"Network request failed\",status:e.status,isTimedOut:!1}))},e.onload=()=>{clearTimeout(u),clearTimeout(o),s({content:e.responseText,status:e.status,isTimedOut:!1})},e.send(t.data)})}return{send:r}}export{m as createXhrRequester};\n//# sourceMappingURL=requester.xhr.js.map","// src/cache/createBrowserLocalStorageCache.ts\nfunction createBrowserLocalStorageCache(options) {\n let storage;\n const namespaceKey = `algolia-client-js-${options.key}`;\n function getStorage() {\n if (storage === void 0) {\n storage = options.localStorage || window.localStorage;\n }\n return storage;\n }\n function getNamespace() {\n return JSON.parse(getStorage().getItem(namespaceKey) || \"{}\");\n }\n function setNamespace(namespace) {\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n }\n function removeOutdatedCacheItems() {\n const timeToLive = options.timeToLive ? 
options.timeToLive * 1e3 : null;\n const namespace = getNamespace();\n const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(\n Object.entries(namespace).filter(([, cacheItem]) => {\n return cacheItem.timestamp !== void 0;\n })\n );\n setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);\n if (!timeToLive) {\n return;\n }\n const filteredNamespaceWithoutExpiredItems = Object.fromEntries(\n Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {\n const currentTimestamp = (/* @__PURE__ */ new Date()).getTime();\n const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;\n return !isExpired;\n })\n );\n setNamespace(filteredNamespaceWithoutExpiredItems);\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return Promise.resolve().then(() => {\n removeOutdatedCacheItems();\n return getNamespace()[JSON.stringify(key)];\n }).then((value) => {\n return Promise.all([value ? value.value : defaultValue(), value !== void 0]);\n }).then(([value, exists]) => {\n return Promise.all([value, exists || events.miss(value)]);\n }).then(([value]) => value);\n },\n set(key, value) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n namespace[JSON.stringify(key)] = {\n timestamp: (/* @__PURE__ */ new Date()).getTime(),\n value\n };\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n return value;\n });\n },\n delete(key) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n delete namespace[JSON.stringify(key)];\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n });\n },\n clear() {\n return Promise.resolve().then(() => {\n getStorage().removeItem(namespaceKey);\n });\n }\n };\n}\n\n// src/cache/createNullCache.ts\nfunction createNullCache() {\n return {\n get(_key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const value = defaultValue();\n return value.then((result) => Promise.all([result, events.miss(result)])).then(([result]) => result);\n },\n set(_key, value) {\n return Promise.resolve(value);\n },\n delete(_key) {\n return Promise.resolve();\n },\n clear() {\n return Promise.resolve();\n }\n };\n}\n\n// src/cache/createFallbackableCache.ts\nfunction createFallbackableCache(options) {\n const caches = [...options.caches];\n const current = caches.shift();\n if (current === void 0) {\n return createNullCache();\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return current.get(key, defaultValue, events).catch(() => {\n return createFallbackableCache({ caches }).get(key, defaultValue, events);\n });\n },\n set(key, value) {\n return current.set(key, value).catch(() => {\n return createFallbackableCache({ caches }).set(key, value);\n });\n },\n delete(key) {\n return current.delete(key).catch(() => {\n return createFallbackableCache({ caches }).delete(key);\n });\n },\n clear() {\n return current.clear().catch(() => {\n return createFallbackableCache({ caches }).clear();\n });\n }\n };\n}\n\n// src/cache/createMemoryCache.ts\nfunction createMemoryCache(options = { serializable: true }) {\n let cache = {};\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const keyAsString = JSON.stringify(key);\n if (keyAsString in cache) {\n return Promise.resolve(options.serializable ? 
JSON.parse(cache[keyAsString]) : cache[keyAsString]);\n }\n const promise = defaultValue();\n return promise.then((value) => events.miss(value)).then(() => promise);\n },\n set(key, value) {\n cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;\n return Promise.resolve(value);\n },\n delete(key) {\n delete cache[JSON.stringify(key)];\n return Promise.resolve();\n },\n clear() {\n cache = {};\n return Promise.resolve();\n }\n };\n}\n\n// src/constants.ts\nvar DEFAULT_CONNECT_TIMEOUT_BROWSER = 1e3;\nvar DEFAULT_READ_TIMEOUT_BROWSER = 2e3;\nvar DEFAULT_WRITE_TIMEOUT_BROWSER = 3e4;\nvar DEFAULT_CONNECT_TIMEOUT_NODE = 2e3;\nvar DEFAULT_READ_TIMEOUT_NODE = 5e3;\nvar DEFAULT_WRITE_TIMEOUT_NODE = 3e4;\n\n// src/createAlgoliaAgent.ts\nfunction createAlgoliaAgent(version) {\n const algoliaAgent = {\n value: `Algolia for JavaScript (${version})`,\n add(options) {\n const addedAlgoliaAgent = `; ${options.segment}${options.version !== void 0 ? ` (${options.version})` : \"\"}`;\n if (algoliaAgent.value.indexOf(addedAlgoliaAgent) === -1) {\n algoliaAgent.value = `${algoliaAgent.value}${addedAlgoliaAgent}`;\n }\n return algoliaAgent;\n }\n };\n return algoliaAgent;\n}\n\n// src/createAuth.ts\nfunction createAuth(appId, apiKey, authMode = \"WithinHeaders\") {\n const credentials = {\n \"x-algolia-api-key\": apiKey,\n \"x-algolia-application-id\": appId\n };\n return {\n headers() {\n return authMode === \"WithinHeaders\" ? credentials : {};\n },\n queryParameters() {\n return authMode === \"WithinQueryParameters\" ? credentials : {};\n }\n };\n}\n\n// src/createIterablePromise.ts\nfunction createIterablePromise({\n func,\n validate,\n aggregator,\n error,\n timeout = () => 0\n}) {\n const retry = (previousResponse) => {\n return new Promise((resolve, reject) => {\n func(previousResponse).then(async (response) => {\n if (aggregator) {\n await aggregator(response);\n }\n if (await validate(response)) {\n return resolve(response);\n }\n if (error && await error.validate(response)) {\n return reject(new Error(await error.message(response)));\n }\n return setTimeout(\n () => {\n retry(response).then(resolve).catch(reject);\n },\n await timeout()\n );\n }).catch((err) => {\n reject(err);\n });\n });\n };\n return retry();\n}\n\n// src/getAlgoliaAgent.ts\nfunction getAlgoliaAgent({ algoliaAgents, client, version }) {\n const defaultAlgoliaAgent = createAlgoliaAgent(version).add({\n segment: client,\n version\n });\n algoliaAgents.forEach((algoliaAgent) => defaultAlgoliaAgent.add(algoliaAgent));\n return defaultAlgoliaAgent;\n}\n\n// src/logger/createNullLogger.ts\nfunction createNullLogger() {\n return {\n debug(_message, _args) {\n return Promise.resolve();\n },\n info(_message, _args) {\n return Promise.resolve();\n },\n error(_message, _args) {\n return Promise.resolve();\n }\n };\n}\n\n// src/transporter/createStatefulHost.ts\nvar EXPIRATION_DELAY = 2 * 60 * 1e3;\nfunction createStatefulHost(host, status = \"up\") {\n const lastUpdate = Date.now();\n function isUp() {\n return status === \"up\" || Date.now() - lastUpdate > EXPIRATION_DELAY;\n }\n function isTimedOut() {\n return status === \"timed out\" && Date.now() - lastUpdate <= EXPIRATION_DELAY;\n }\n return { ...host, status, lastUpdate, isUp, isTimedOut };\n}\n\n// src/transporter/errors.ts\nvar AlgoliaError = class extends Error {\n name = \"AlgoliaError\";\n constructor(message, name) {\n super(message);\n if (name) {\n this.name = name;\n }\n }\n};\nvar ErrorWithStackTrace = class extends AlgoliaError {\n stackTrace;\n 
constructor(message, stackTrace, name) {\n super(message, name);\n this.stackTrace = stackTrace;\n }\n};\nvar RetryError = class extends ErrorWithStackTrace {\n constructor(stackTrace) {\n super(\n \"Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support.\",\n stackTrace,\n \"RetryError\"\n );\n }\n};\nvar ApiError = class extends ErrorWithStackTrace {\n status;\n constructor(message, status, stackTrace, name = \"ApiError\") {\n super(message, stackTrace, name);\n this.status = status;\n }\n};\nvar DeserializationError = class extends AlgoliaError {\n response;\n constructor(message, response) {\n super(message, \"DeserializationError\");\n this.response = response;\n }\n};\nvar DetailedApiError = class extends ApiError {\n error;\n constructor(message, status, error, stackTrace) {\n super(message, status, stackTrace, \"DetailedApiError\");\n this.error = error;\n }\n};\n\n// src/transporter/helpers.ts\nfunction shuffle(array) {\n const shuffledArray = array;\n for (let c = array.length - 1; c > 0; c--) {\n const b = Math.floor(Math.random() * (c + 1));\n const a = array[c];\n shuffledArray[c] = array[b];\n shuffledArray[b] = a;\n }\n return shuffledArray;\n}\nfunction serializeUrl(host, path, queryParameters) {\n const queryParametersAsString = serializeQueryParameters(queryParameters);\n let url = `${host.protocol}://${host.url}${host.port ? `:${host.port}` : \"\"}/${path.charAt(0) === \"/\" ? path.substring(1) : path}`;\n if (queryParametersAsString.length) {\n url += `?${queryParametersAsString}`;\n }\n return url;\n}\nfunction serializeQueryParameters(parameters) {\n return Object.keys(parameters).filter((key) => parameters[key] !== void 0).sort().map(\n (key) => `${key}=${encodeURIComponent(\n Object.prototype.toString.call(parameters[key]) === \"[object Array]\" ? parameters[key].join(\",\") : parameters[key]\n ).replace(/\\+/g, \"%20\")}`\n ).join(\"&\");\n}\nfunction serializeData(request, requestOptions) {\n if (request.method === \"GET\" || request.data === void 0 && requestOptions.data === void 0) {\n return void 0;\n }\n const data = Array.isArray(request.data) ? 
request.data : { ...request.data, ...requestOptions.data };\n return JSON.stringify(data);\n}\nfunction serializeHeaders(baseHeaders, requestHeaders, requestOptionsHeaders) {\n const headers = {\n Accept: \"application/json\",\n ...baseHeaders,\n ...requestHeaders,\n ...requestOptionsHeaders\n };\n const serializedHeaders = {};\n Object.keys(headers).forEach((header) => {\n const value = headers[header];\n serializedHeaders[header.toLowerCase()] = value;\n });\n return serializedHeaders;\n}\nfunction deserializeSuccess(response) {\n try {\n return JSON.parse(response.content);\n } catch (e) {\n throw new DeserializationError(e.message, response);\n }\n}\nfunction deserializeFailure({ content, status }, stackFrame) {\n try {\n const parsed = JSON.parse(content);\n if (\"error\" in parsed) {\n return new DetailedApiError(parsed.message, status, parsed.error, stackFrame);\n }\n return new ApiError(parsed.message, status, stackFrame);\n } catch {\n }\n return new ApiError(content, status, stackFrame);\n}\n\n// src/transporter/responses.ts\nfunction isNetworkError({ isTimedOut, status }) {\n return !isTimedOut && ~~status === 0;\n}\nfunction isRetryable({ isTimedOut, status }) {\n return isTimedOut || isNetworkError({ isTimedOut, status }) || ~~(status / 100) !== 2 && ~~(status / 100) !== 4;\n}\nfunction isSuccess({ status }) {\n return ~~(status / 100) === 2;\n}\n\n// src/transporter/stackTrace.ts\nfunction stackTraceWithoutCredentials(stackTrace) {\n return stackTrace.map((stackFrame) => stackFrameWithoutCredentials(stackFrame));\n}\nfunction stackFrameWithoutCredentials(stackFrame) {\n const modifiedHeaders = stackFrame.request.headers[\"x-algolia-api-key\"] ? { \"x-algolia-api-key\": \"*****\" } : {};\n return {\n ...stackFrame,\n request: {\n ...stackFrame.request,\n headers: {\n ...stackFrame.request.headers,\n ...modifiedHeaders\n }\n }\n };\n}\n\n// src/transporter/createTransporter.ts\nfunction createTransporter({\n hosts,\n hostsCache,\n baseHeaders,\n logger,\n baseQueryParameters,\n algoliaAgent,\n timeouts,\n requester,\n requestsCache,\n responsesCache\n}) {\n async function createRetryableOptions(compatibleHosts) {\n const statefulHosts = await Promise.all(\n compatibleHosts.map((compatibleHost) => {\n return hostsCache.get(compatibleHost, () => {\n return Promise.resolve(createStatefulHost(compatibleHost));\n });\n })\n );\n const hostsUp = statefulHosts.filter((host) => host.isUp());\n const hostsTimedOut = statefulHosts.filter((host) => host.isTimedOut());\n const hostsAvailable = [...hostsUp, ...hostsTimedOut];\n const compatibleHostsAvailable = hostsAvailable.length > 0 ? hostsAvailable : compatibleHosts;\n return {\n hosts: compatibleHostsAvailable,\n getTimeout(timeoutsCount, baseTimeout) {\n const timeoutMultiplier = hostsTimedOut.length === 0 && timeoutsCount === 0 ? 1 : hostsTimedOut.length + 3 + timeoutsCount;\n return timeoutMultiplier * baseTimeout;\n }\n };\n }\n async function retryableRequest(request, requestOptions, isRead = true) {\n const stackTrace = [];\n const data = serializeData(request, requestOptions);\n const headers = serializeHeaders(baseHeaders, request.headers, requestOptions.headers);\n const dataQueryParameters = request.method === \"GET\" ? 
{\n ...request.data,\n ...requestOptions.data\n } : {};\n const queryParameters = {\n ...baseQueryParameters,\n ...request.queryParameters,\n ...dataQueryParameters\n };\n if (algoliaAgent.value) {\n queryParameters[\"x-algolia-agent\"] = algoliaAgent.value;\n }\n if (requestOptions && requestOptions.queryParameters) {\n for (const key of Object.keys(requestOptions.queryParameters)) {\n if (!requestOptions.queryParameters[key] || Object.prototype.toString.call(requestOptions.queryParameters[key]) === \"[object Object]\") {\n queryParameters[key] = requestOptions.queryParameters[key];\n } else {\n queryParameters[key] = requestOptions.queryParameters[key].toString();\n }\n }\n }\n let timeoutsCount = 0;\n const retry = async (retryableHosts, getTimeout) => {\n const host = retryableHosts.pop();\n if (host === void 0) {\n throw new RetryError(stackTraceWithoutCredentials(stackTrace));\n }\n const timeout = { ...timeouts, ...requestOptions.timeouts };\n const payload = {\n data,\n headers,\n method: request.method,\n url: serializeUrl(host, request.path, queryParameters),\n connectTimeout: getTimeout(timeoutsCount, timeout.connect),\n responseTimeout: getTimeout(timeoutsCount, isRead ? timeout.read : timeout.write)\n };\n const pushToStackTrace = (response2) => {\n const stackFrame = {\n request: payload,\n response: response2,\n host,\n triesLeft: retryableHosts.length\n };\n stackTrace.push(stackFrame);\n return stackFrame;\n };\n const response = await requester.send(payload);\n if (isRetryable(response)) {\n const stackFrame = pushToStackTrace(response);\n if (response.isTimedOut) {\n timeoutsCount++;\n }\n logger.info(\"Retryable failure\", stackFrameWithoutCredentials(stackFrame));\n await hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? \"timed out\" : \"down\"));\n return retry(retryableHosts, getTimeout);\n }\n if (isSuccess(response)) {\n return deserializeSuccess(response);\n }\n pushToStackTrace(response);\n throw deserializeFailure(response, stackTrace);\n };\n const compatibleHosts = hosts.filter(\n (host) => host.accept === \"readWrite\" || (isRead ? 
host.accept === \"read\" : host.accept === \"write\")\n );\n const options = await createRetryableOptions(compatibleHosts);\n return retry([...options.hosts].reverse(), options.getTimeout);\n }\n function createRequest(request, requestOptions = {}) {\n const isRead = request.useReadTransporter || request.method === \"GET\";\n if (!isRead) {\n return retryableRequest(request, requestOptions, isRead);\n }\n const createRetryableRequest = () => {\n return retryableRequest(request, requestOptions);\n };\n const cacheable = requestOptions.cacheable || request.cacheable;\n if (cacheable !== true) {\n return createRetryableRequest();\n }\n const key = {\n request,\n requestOptions,\n transporter: {\n queryParameters: baseQueryParameters,\n headers: baseHeaders\n }\n };\n return responsesCache.get(\n key,\n () => {\n return requestsCache.get(\n key,\n () => (\n /**\n * Finally, if there is no request in progress with the same key,\n * this `createRetryableRequest()` will actually trigger the\n * retryable request.\n */\n requestsCache.set(key, createRetryableRequest()).then(\n (response) => Promise.all([requestsCache.delete(key), response]),\n (err) => Promise.all([requestsCache.delete(key), Promise.reject(err)])\n ).then(([_, response]) => response)\n )\n );\n },\n {\n /**\n * Of course, once we get this response back from the server, we\n * tell response cache to actually store the received response\n * to be used later.\n */\n miss: (response) => responsesCache.set(key, response)\n }\n );\n }\n return {\n hostsCache,\n requester,\n timeouts,\n logger,\n algoliaAgent,\n baseHeaders,\n baseQueryParameters,\n hosts,\n request: createRequest,\n requestsCache,\n responsesCache\n };\n}\n\n// src/types/logger.ts\nvar LogLevelEnum = {\n Debug: 1,\n Info: 2,\n Error: 3\n};\nexport {\n AlgoliaError,\n ApiError,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_CONNECT_TIMEOUT_NODE,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_NODE,\n DEFAULT_WRITE_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_NODE,\n DeserializationError,\n DetailedApiError,\n ErrorWithStackTrace,\n LogLevelEnum,\n RetryError,\n createAlgoliaAgent,\n createAuth,\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createIterablePromise,\n createMemoryCache,\n createNullCache,\n createNullLogger,\n createStatefulHost,\n createTransporter,\n deserializeFailure,\n deserializeSuccess,\n getAlgoliaAgent,\n isNetworkError,\n isRetryable,\n isSuccess,\n serializeData,\n serializeHeaders,\n serializeQueryParameters,\n serializeUrl,\n shuffle,\n stackFrameWithoutCredentials,\n stackTraceWithoutCredentials\n};\n//# sourceMappingURL=common.js.map","// builds/browser.ts\nimport { createXhrRequester } from \"@algolia/requester-browser-xhr\";\nimport {\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createMemoryCache,\n createNullLogger\n} from \"@algolia/client-common\";\n\n// src/insightsClient.ts\nimport { createAuth, createTransporter, getAlgoliaAgent } from \"@algolia/client-common\";\nvar apiClientVersion = \"5.19.0\";\nvar REGIONS = [\"de\", \"us\"];\nfunction getDefaultHosts(region) {\n const url = !region ? 
\"insights.algolia.io\" : \"insights.{region}.algolia.io\".replace(\"{region}\", region);\n return [{ url, accept: \"readWrite\", protocol: \"https\" }];\n}\nfunction createInsightsClient({\n appId: appIdOption,\n apiKey: apiKeyOption,\n authMode,\n algoliaAgents,\n region: regionOption,\n ...options\n}) {\n const auth = createAuth(appIdOption, apiKeyOption, authMode);\n const transporter = createTransporter({\n hosts: getDefaultHosts(regionOption),\n ...options,\n algoliaAgent: getAlgoliaAgent({\n algoliaAgents,\n client: \"Insights\",\n version: apiClientVersion\n }),\n baseHeaders: {\n \"content-type\": \"text/plain\",\n ...auth.headers(),\n ...options.baseHeaders\n },\n baseQueryParameters: {\n ...auth.queryParameters(),\n ...options.baseQueryParameters\n }\n });\n return {\n transporter,\n /**\n * The `appId` currently in use.\n */\n appId: appIdOption,\n /**\n * The `apiKey` currently in use.\n */\n apiKey: apiKeyOption,\n /**\n * Clears the cache of the transporter for the `requestsCache` and `responsesCache` properties.\n */\n clearCache() {\n return Promise.all([transporter.requestsCache.clear(), transporter.responsesCache.clear()]).then(() => void 0);\n },\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return transporter.algoliaAgent.value;\n },\n /**\n * Adds a `segment` to the `x-algolia-agent` sent with every requests.\n *\n * @param segment - The algolia agent (user-agent) segment to add.\n * @param version - The version of the agent.\n */\n addAlgoliaAgent(segment, version) {\n transporter.algoliaAgent.add({ segment, version });\n },\n /**\n * Helper method to switch the API key used to authenticate the requests.\n *\n * @param params - Method params.\n * @param params.apiKey - The new API Key to use.\n */\n setClientApiKey({ apiKey }) {\n if (!authMode || authMode === \"WithinHeaders\") {\n transporter.baseHeaders[\"x-algolia-api-key\"] = apiKey;\n } else {\n transporter.baseQueryParameters[\"x-algolia-api-key\"] = apiKey;\n }\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customDelete - The customDelete object.\n * @param customDelete.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customDelete.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customDelete({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customDelete`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? 
parameters : {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customGet - The customGet object.\n * @param customGet.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customGet.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customGet({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customGet`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPost - The customPost object.\n * @param customPost.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPost.parameters - Query parameters to apply to the current query.\n * @param customPost.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPost({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPost`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPut - The customPut object.\n * @param customPut.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPut.parameters - Query parameters to apply to the current query.\n * @param customPut.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPut({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPut`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes all events related to the specified user token from events metrics and analytics. The deletion is asynchronous, and processed within 48 hours. 
To delete a personalization user profile, see `Delete a user profile` in the Personalization API.\n * @param deleteUserToken - The deleteUserToken object.\n * @param deleteUserToken.userToken - User token for which to delete all associated events.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteUserToken({ userToken }, requestOptions) {\n if (!userToken) {\n throw new Error(\"Parameter `userToken` is required when calling `deleteUserToken`.\");\n }\n const requestPath = \"/1/usertokens/{userToken}\".replace(\"{userToken}\", encodeURIComponent(userToken));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Sends a list of events to the Insights API. You can include up to 1,000 events in a single request, but the request body must be smaller than 2 MB.\n * @param insightsEvents - The insightsEvents object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n pushEvents(insightsEvents, requestOptions) {\n if (!insightsEvents) {\n throw new Error(\"Parameter `insightsEvents` is required when calling `pushEvents`.\");\n }\n if (!insightsEvents.events) {\n throw new Error(\"Parameter `insightsEvents.events` is required when calling `pushEvents`.\");\n }\n const requestPath = \"/1/events\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: insightsEvents\n };\n return transporter.request(request, requestOptions);\n }\n };\n}\n\n// builds/browser.ts\nfunction insightsClient(appId, apiKey, region, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n if (region && (typeof region !== \"string\" || !REGIONS.includes(region))) {\n throw new Error(`\\`region\\` must be one of the following: ${REGIONS.join(\", \")}`);\n }\n return createInsightsClient({\n appId,\n apiKey,\n region,\n timeouts: {\n connect: 1e3,\n read: 2e3,\n write: 3e4\n },\n logger: createNullLogger(),\n requester: createXhrRequester(),\n algoliaAgents: [{ segment: \"Browser\" }],\n authMode: \"WithinQueryParameters\",\n responsesCache: createMemoryCache(),\n requestsCache: createMemoryCache({ serializable: false }),\n hostsCache: createFallbackableCache({\n caches: [createBrowserLocalStorageCache({ key: `${apiClientVersion}-${appId}` }), createMemoryCache()]\n }),\n ...options\n });\n}\nexport {\n apiClientVersion,\n insightsClient\n};\n//# sourceMappingURL=browser.js.map","function m(){function r(t){return new Promise(s=>{let e=new XMLHttpRequest;e.open(t.method,t.url,!0),Object.keys(t.headers).forEach(n=>e.setRequestHeader(n,t.headers[n]));let i=(n,a)=>setTimeout(()=>{e.abort(),s({status:0,content:a,isTimedOut:!0})},n),u=i(t.connectTimeout,\"Connection timeout\"),o;e.onreadystatechange=()=>{e.readyState>e.OPENED&&o===void 0&&(clearTimeout(u),o=i(t.responseTimeout,\"Socket timeout\"))},e.onerror=()=>{e.status===0&&(clearTimeout(u),clearTimeout(o),s({content:e.responseText||\"Network request 
failed\",status:e.status,isTimedOut:!1}))},e.onload=()=>{clearTimeout(u),clearTimeout(o),s({content:e.responseText,status:e.status,isTimedOut:!1})},e.send(t.data)})}return{send:r}}export{m as createXhrRequester};\n//# sourceMappingURL=requester.xhr.js.map","// src/cache/createBrowserLocalStorageCache.ts\nfunction createBrowserLocalStorageCache(options) {\n let storage;\n const namespaceKey = `algolia-client-js-${options.key}`;\n function getStorage() {\n if (storage === void 0) {\n storage = options.localStorage || window.localStorage;\n }\n return storage;\n }\n function getNamespace() {\n return JSON.parse(getStorage().getItem(namespaceKey) || \"{}\");\n }\n function setNamespace(namespace) {\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n }\n function removeOutdatedCacheItems() {\n const timeToLive = options.timeToLive ? options.timeToLive * 1e3 : null;\n const namespace = getNamespace();\n const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(\n Object.entries(namespace).filter(([, cacheItem]) => {\n return cacheItem.timestamp !== void 0;\n })\n );\n setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);\n if (!timeToLive) {\n return;\n }\n const filteredNamespaceWithoutExpiredItems = Object.fromEntries(\n Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {\n const currentTimestamp = (/* @__PURE__ */ new Date()).getTime();\n const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;\n return !isExpired;\n })\n );\n setNamespace(filteredNamespaceWithoutExpiredItems);\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return Promise.resolve().then(() => {\n removeOutdatedCacheItems();\n return getNamespace()[JSON.stringify(key)];\n }).then((value) => {\n return Promise.all([value ? 
value.value : defaultValue(), value !== void 0]);\n }).then(([value, exists]) => {\n return Promise.all([value, exists || events.miss(value)]);\n }).then(([value]) => value);\n },\n set(key, value) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n namespace[JSON.stringify(key)] = {\n timestamp: (/* @__PURE__ */ new Date()).getTime(),\n value\n };\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n return value;\n });\n },\n delete(key) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n delete namespace[JSON.stringify(key)];\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n });\n },\n clear() {\n return Promise.resolve().then(() => {\n getStorage().removeItem(namespaceKey);\n });\n }\n };\n}\n\n// src/cache/createNullCache.ts\nfunction createNullCache() {\n return {\n get(_key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const value = defaultValue();\n return value.then((result) => Promise.all([result, events.miss(result)])).then(([result]) => result);\n },\n set(_key, value) {\n return Promise.resolve(value);\n },\n delete(_key) {\n return Promise.resolve();\n },\n clear() {\n return Promise.resolve();\n }\n };\n}\n\n// src/cache/createFallbackableCache.ts\nfunction createFallbackableCache(options) {\n const caches = [...options.caches];\n const current = caches.shift();\n if (current === void 0) {\n return createNullCache();\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return current.get(key, defaultValue, events).catch(() => {\n return createFallbackableCache({ caches }).get(key, defaultValue, events);\n });\n },\n set(key, value) {\n return current.set(key, value).catch(() => {\n return createFallbackableCache({ caches }).set(key, value);\n });\n },\n delete(key) {\n return current.delete(key).catch(() => {\n return createFallbackableCache({ caches }).delete(key);\n });\n },\n clear() {\n return current.clear().catch(() => {\n return createFallbackableCache({ caches }).clear();\n });\n }\n };\n}\n\n// src/cache/createMemoryCache.ts\nfunction createMemoryCache(options = { serializable: true }) {\n let cache = {};\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const keyAsString = JSON.stringify(key);\n if (keyAsString in cache) {\n return Promise.resolve(options.serializable ? JSON.parse(cache[keyAsString]) : cache[keyAsString]);\n }\n const promise = defaultValue();\n return promise.then((value) => events.miss(value)).then(() => promise);\n },\n set(key, value) {\n cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;\n return Promise.resolve(value);\n },\n delete(key) {\n delete cache[JSON.stringify(key)];\n return Promise.resolve();\n },\n clear() {\n cache = {};\n return Promise.resolve();\n }\n };\n}\n\n// src/constants.ts\nvar DEFAULT_CONNECT_TIMEOUT_BROWSER = 1e3;\nvar DEFAULT_READ_TIMEOUT_BROWSER = 2e3;\nvar DEFAULT_WRITE_TIMEOUT_BROWSER = 3e4;\nvar DEFAULT_CONNECT_TIMEOUT_NODE = 2e3;\nvar DEFAULT_READ_TIMEOUT_NODE = 5e3;\nvar DEFAULT_WRITE_TIMEOUT_NODE = 3e4;\n\n// src/createAlgoliaAgent.ts\nfunction createAlgoliaAgent(version) {\n const algoliaAgent = {\n value: `Algolia for JavaScript (${version})`,\n add(options) {\n const addedAlgoliaAgent = `; ${options.segment}${options.version !== void 0 ? 
` (${options.version})` : \"\"}`;\n if (algoliaAgent.value.indexOf(addedAlgoliaAgent) === -1) {\n algoliaAgent.value = `${algoliaAgent.value}${addedAlgoliaAgent}`;\n }\n return algoliaAgent;\n }\n };\n return algoliaAgent;\n}\n\n// src/createAuth.ts\nfunction createAuth(appId, apiKey, authMode = \"WithinHeaders\") {\n const credentials = {\n \"x-algolia-api-key\": apiKey,\n \"x-algolia-application-id\": appId\n };\n return {\n headers() {\n return authMode === \"WithinHeaders\" ? credentials : {};\n },\n queryParameters() {\n return authMode === \"WithinQueryParameters\" ? credentials : {};\n }\n };\n}\n\n// src/createIterablePromise.ts\nfunction createIterablePromise({\n func,\n validate,\n aggregator,\n error,\n timeout = () => 0\n}) {\n const retry = (previousResponse) => {\n return new Promise((resolve, reject) => {\n func(previousResponse).then(async (response) => {\n if (aggregator) {\n await aggregator(response);\n }\n if (await validate(response)) {\n return resolve(response);\n }\n if (error && await error.validate(response)) {\n return reject(new Error(await error.message(response)));\n }\n return setTimeout(\n () => {\n retry(response).then(resolve).catch(reject);\n },\n await timeout()\n );\n }).catch((err) => {\n reject(err);\n });\n });\n };\n return retry();\n}\n\n// src/getAlgoliaAgent.ts\nfunction getAlgoliaAgent({ algoliaAgents, client, version }) {\n const defaultAlgoliaAgent = createAlgoliaAgent(version).add({\n segment: client,\n version\n });\n algoliaAgents.forEach((algoliaAgent) => defaultAlgoliaAgent.add(algoliaAgent));\n return defaultAlgoliaAgent;\n}\n\n// src/logger/createNullLogger.ts\nfunction createNullLogger() {\n return {\n debug(_message, _args) {\n return Promise.resolve();\n },\n info(_message, _args) {\n return Promise.resolve();\n },\n error(_message, _args) {\n return Promise.resolve();\n }\n };\n}\n\n// src/transporter/createStatefulHost.ts\nvar EXPIRATION_DELAY = 2 * 60 * 1e3;\nfunction createStatefulHost(host, status = \"up\") {\n const lastUpdate = Date.now();\n function isUp() {\n return status === \"up\" || Date.now() - lastUpdate > EXPIRATION_DELAY;\n }\n function isTimedOut() {\n return status === \"timed out\" && Date.now() - lastUpdate <= EXPIRATION_DELAY;\n }\n return { ...host, status, lastUpdate, isUp, isTimedOut };\n}\n\n// src/transporter/errors.ts\nvar AlgoliaError = class extends Error {\n name = \"AlgoliaError\";\n constructor(message, name) {\n super(message);\n if (name) {\n this.name = name;\n }\n }\n};\nvar ErrorWithStackTrace = class extends AlgoliaError {\n stackTrace;\n constructor(message, stackTrace, name) {\n super(message, name);\n this.stackTrace = stackTrace;\n }\n};\nvar RetryError = class extends ErrorWithStackTrace {\n constructor(stackTrace) {\n super(\n \"Unreachable hosts - your application id may be incorrect. 
If the error persists, please reach out to the Algolia Support team: https://alg.li/support.\",\n stackTrace,\n \"RetryError\"\n );\n }\n};\nvar ApiError = class extends ErrorWithStackTrace {\n status;\n constructor(message, status, stackTrace, name = \"ApiError\") {\n super(message, stackTrace, name);\n this.status = status;\n }\n};\nvar DeserializationError = class extends AlgoliaError {\n response;\n constructor(message, response) {\n super(message, \"DeserializationError\");\n this.response = response;\n }\n};\nvar DetailedApiError = class extends ApiError {\n error;\n constructor(message, status, error, stackTrace) {\n super(message, status, stackTrace, \"DetailedApiError\");\n this.error = error;\n }\n};\n\n// src/transporter/helpers.ts\nfunction shuffle(array) {\n const shuffledArray = array;\n for (let c = array.length - 1; c > 0; c--) {\n const b = Math.floor(Math.random() * (c + 1));\n const a = array[c];\n shuffledArray[c] = array[b];\n shuffledArray[b] = a;\n }\n return shuffledArray;\n}\nfunction serializeUrl(host, path, queryParameters) {\n const queryParametersAsString = serializeQueryParameters(queryParameters);\n let url = `${host.protocol}://${host.url}${host.port ? `:${host.port}` : \"\"}/${path.charAt(0) === \"/\" ? path.substring(1) : path}`;\n if (queryParametersAsString.length) {\n url += `?${queryParametersAsString}`;\n }\n return url;\n}\nfunction serializeQueryParameters(parameters) {\n return Object.keys(parameters).filter((key) => parameters[key] !== void 0).sort().map(\n (key) => `${key}=${encodeURIComponent(\n Object.prototype.toString.call(parameters[key]) === \"[object Array]\" ? parameters[key].join(\",\") : parameters[key]\n ).replace(/\\+/g, \"%20\")}`\n ).join(\"&\");\n}\nfunction serializeData(request, requestOptions) {\n if (request.method === \"GET\" || request.data === void 0 && requestOptions.data === void 0) {\n return void 0;\n }\n const data = Array.isArray(request.data) ? 
request.data : { ...request.data, ...requestOptions.data };\n return JSON.stringify(data);\n}\nfunction serializeHeaders(baseHeaders, requestHeaders, requestOptionsHeaders) {\n const headers = {\n Accept: \"application/json\",\n ...baseHeaders,\n ...requestHeaders,\n ...requestOptionsHeaders\n };\n const serializedHeaders = {};\n Object.keys(headers).forEach((header) => {\n const value = headers[header];\n serializedHeaders[header.toLowerCase()] = value;\n });\n return serializedHeaders;\n}\nfunction deserializeSuccess(response) {\n try {\n return JSON.parse(response.content);\n } catch (e) {\n throw new DeserializationError(e.message, response);\n }\n}\nfunction deserializeFailure({ content, status }, stackFrame) {\n try {\n const parsed = JSON.parse(content);\n if (\"error\" in parsed) {\n return new DetailedApiError(parsed.message, status, parsed.error, stackFrame);\n }\n return new ApiError(parsed.message, status, stackFrame);\n } catch {\n }\n return new ApiError(content, status, stackFrame);\n}\n\n// src/transporter/responses.ts\nfunction isNetworkError({ isTimedOut, status }) {\n return !isTimedOut && ~~status === 0;\n}\nfunction isRetryable({ isTimedOut, status }) {\n return isTimedOut || isNetworkError({ isTimedOut, status }) || ~~(status / 100) !== 2 && ~~(status / 100) !== 4;\n}\nfunction isSuccess({ status }) {\n return ~~(status / 100) === 2;\n}\n\n// src/transporter/stackTrace.ts\nfunction stackTraceWithoutCredentials(stackTrace) {\n return stackTrace.map((stackFrame) => stackFrameWithoutCredentials(stackFrame));\n}\nfunction stackFrameWithoutCredentials(stackFrame) {\n const modifiedHeaders = stackFrame.request.headers[\"x-algolia-api-key\"] ? { \"x-algolia-api-key\": \"*****\" } : {};\n return {\n ...stackFrame,\n request: {\n ...stackFrame.request,\n headers: {\n ...stackFrame.request.headers,\n ...modifiedHeaders\n }\n }\n };\n}\n\n// src/transporter/createTransporter.ts\nfunction createTransporter({\n hosts,\n hostsCache,\n baseHeaders,\n logger,\n baseQueryParameters,\n algoliaAgent,\n timeouts,\n requester,\n requestsCache,\n responsesCache\n}) {\n async function createRetryableOptions(compatibleHosts) {\n const statefulHosts = await Promise.all(\n compatibleHosts.map((compatibleHost) => {\n return hostsCache.get(compatibleHost, () => {\n return Promise.resolve(createStatefulHost(compatibleHost));\n });\n })\n );\n const hostsUp = statefulHosts.filter((host) => host.isUp());\n const hostsTimedOut = statefulHosts.filter((host) => host.isTimedOut());\n const hostsAvailable = [...hostsUp, ...hostsTimedOut];\n const compatibleHostsAvailable = hostsAvailable.length > 0 ? hostsAvailable : compatibleHosts;\n return {\n hosts: compatibleHostsAvailable,\n getTimeout(timeoutsCount, baseTimeout) {\n const timeoutMultiplier = hostsTimedOut.length === 0 && timeoutsCount === 0 ? 1 : hostsTimedOut.length + 3 + timeoutsCount;\n return timeoutMultiplier * baseTimeout;\n }\n };\n }\n async function retryableRequest(request, requestOptions, isRead = true) {\n const stackTrace = [];\n const data = serializeData(request, requestOptions);\n const headers = serializeHeaders(baseHeaders, request.headers, requestOptions.headers);\n const dataQueryParameters = request.method === \"GET\" ? 
{\n ...request.data,\n ...requestOptions.data\n } : {};\n const queryParameters = {\n ...baseQueryParameters,\n ...request.queryParameters,\n ...dataQueryParameters\n };\n if (algoliaAgent.value) {\n queryParameters[\"x-algolia-agent\"] = algoliaAgent.value;\n }\n if (requestOptions && requestOptions.queryParameters) {\n for (const key of Object.keys(requestOptions.queryParameters)) {\n if (!requestOptions.queryParameters[key] || Object.prototype.toString.call(requestOptions.queryParameters[key]) === \"[object Object]\") {\n queryParameters[key] = requestOptions.queryParameters[key];\n } else {\n queryParameters[key] = requestOptions.queryParameters[key].toString();\n }\n }\n }\n let timeoutsCount = 0;\n const retry = async (retryableHosts, getTimeout) => {\n const host = retryableHosts.pop();\n if (host === void 0) {\n throw new RetryError(stackTraceWithoutCredentials(stackTrace));\n }\n const timeout = { ...timeouts, ...requestOptions.timeouts };\n const payload = {\n data,\n headers,\n method: request.method,\n url: serializeUrl(host, request.path, queryParameters),\n connectTimeout: getTimeout(timeoutsCount, timeout.connect),\n responseTimeout: getTimeout(timeoutsCount, isRead ? timeout.read : timeout.write)\n };\n const pushToStackTrace = (response2) => {\n const stackFrame = {\n request: payload,\n response: response2,\n host,\n triesLeft: retryableHosts.length\n };\n stackTrace.push(stackFrame);\n return stackFrame;\n };\n const response = await requester.send(payload);\n if (isRetryable(response)) {\n const stackFrame = pushToStackTrace(response);\n if (response.isTimedOut) {\n timeoutsCount++;\n }\n logger.info(\"Retryable failure\", stackFrameWithoutCredentials(stackFrame));\n await hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? \"timed out\" : \"down\"));\n return retry(retryableHosts, getTimeout);\n }\n if (isSuccess(response)) {\n return deserializeSuccess(response);\n }\n pushToStackTrace(response);\n throw deserializeFailure(response, stackTrace);\n };\n const compatibleHosts = hosts.filter(\n (host) => host.accept === \"readWrite\" || (isRead ? 
host.accept === \"read\" : host.accept === \"write\")\n );\n const options = await createRetryableOptions(compatibleHosts);\n return retry([...options.hosts].reverse(), options.getTimeout);\n }\n function createRequest(request, requestOptions = {}) {\n const isRead = request.useReadTransporter || request.method === \"GET\";\n if (!isRead) {\n return retryableRequest(request, requestOptions, isRead);\n }\n const createRetryableRequest = () => {\n return retryableRequest(request, requestOptions);\n };\n const cacheable = requestOptions.cacheable || request.cacheable;\n if (cacheable !== true) {\n return createRetryableRequest();\n }\n const key = {\n request,\n requestOptions,\n transporter: {\n queryParameters: baseQueryParameters,\n headers: baseHeaders\n }\n };\n return responsesCache.get(\n key,\n () => {\n return requestsCache.get(\n key,\n () => (\n /**\n * Finally, if there is no request in progress with the same key,\n * this `createRetryableRequest()` will actually trigger the\n * retryable request.\n */\n requestsCache.set(key, createRetryableRequest()).then(\n (response) => Promise.all([requestsCache.delete(key), response]),\n (err) => Promise.all([requestsCache.delete(key), Promise.reject(err)])\n ).then(([_, response]) => response)\n )\n );\n },\n {\n /**\n * Of course, once we get this response back from the server, we\n * tell response cache to actually store the received response\n * to be used later.\n */\n miss: (response) => responsesCache.set(key, response)\n }\n );\n }\n return {\n hostsCache,\n requester,\n timeouts,\n logger,\n algoliaAgent,\n baseHeaders,\n baseQueryParameters,\n hosts,\n request: createRequest,\n requestsCache,\n responsesCache\n };\n}\n\n// src/types/logger.ts\nvar LogLevelEnum = {\n Debug: 1,\n Info: 2,\n Error: 3\n};\nexport {\n AlgoliaError,\n ApiError,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_CONNECT_TIMEOUT_NODE,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_NODE,\n DEFAULT_WRITE_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_NODE,\n DeserializationError,\n DetailedApiError,\n ErrorWithStackTrace,\n LogLevelEnum,\n RetryError,\n createAlgoliaAgent,\n createAuth,\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createIterablePromise,\n createMemoryCache,\n createNullCache,\n createNullLogger,\n createStatefulHost,\n createTransporter,\n deserializeFailure,\n deserializeSuccess,\n getAlgoliaAgent,\n isNetworkError,\n isRetryable,\n isSuccess,\n serializeData,\n serializeHeaders,\n serializeQueryParameters,\n serializeUrl,\n shuffle,\n stackFrameWithoutCredentials,\n stackTraceWithoutCredentials\n};\n//# sourceMappingURL=common.js.map","// builds/browser.ts\nimport { createXhrRequester } from \"@algolia/requester-browser-xhr\";\nimport {\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createMemoryCache,\n createNullLogger\n} from \"@algolia/client-common\";\n\n// src/personalizationClient.ts\nimport { createAuth, createTransporter, getAlgoliaAgent } from \"@algolia/client-common\";\nvar apiClientVersion = \"5.19.0\";\nvar REGIONS = [\"eu\", \"us\"];\nfunction getDefaultHosts(region) {\n const url = \"personalization.{region}.algolia.com\".replace(\"{region}\", region);\n return [{ url, accept: \"readWrite\", protocol: \"https\" }];\n}\nfunction createPersonalizationClient({\n appId: appIdOption,\n apiKey: apiKeyOption,\n authMode,\n algoliaAgents,\n region: regionOption,\n ...options\n}) {\n const auth = createAuth(appIdOption, apiKeyOption, authMode);\n const transporter = createTransporter({\n hosts: 
getDefaultHosts(regionOption),\n ...options,\n algoliaAgent: getAlgoliaAgent({\n algoliaAgents,\n client: \"Personalization\",\n version: apiClientVersion\n }),\n baseHeaders: {\n \"content-type\": \"text/plain\",\n ...auth.headers(),\n ...options.baseHeaders\n },\n baseQueryParameters: {\n ...auth.queryParameters(),\n ...options.baseQueryParameters\n }\n });\n return {\n transporter,\n /**\n * The `appId` currently in use.\n */\n appId: appIdOption,\n /**\n * The `apiKey` currently in use.\n */\n apiKey: apiKeyOption,\n /**\n * Clears the cache of the transporter for the `requestsCache` and `responsesCache` properties.\n */\n clearCache() {\n return Promise.all([transporter.requestsCache.clear(), transporter.responsesCache.clear()]).then(() => void 0);\n },\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return transporter.algoliaAgent.value;\n },\n /**\n * Adds a `segment` to the `x-algolia-agent` sent with every requests.\n *\n * @param segment - The algolia agent (user-agent) segment to add.\n * @param version - The version of the agent.\n */\n addAlgoliaAgent(segment, version) {\n transporter.algoliaAgent.add({ segment, version });\n },\n /**\n * Helper method to switch the API key used to authenticate the requests.\n *\n * @param params - Method params.\n * @param params.apiKey - The new API Key to use.\n */\n setClientApiKey({ apiKey }) {\n if (!authMode || authMode === \"WithinHeaders\") {\n transporter.baseHeaders[\"x-algolia-api-key\"] = apiKey;\n } else {\n transporter.baseQueryParameters[\"x-algolia-api-key\"] = apiKey;\n }\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customDelete - The customDelete object.\n * @param customDelete.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customDelete.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customDelete({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customDelete`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customGet - The customGet object.\n * @param customGet.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customGet.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customGet({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customGet`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? 
parameters : {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPost - The customPost object.\n * @param customPost.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPost.parameters - Query parameters to apply to the current query.\n * @param customPost.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPost({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPost`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPut - The customPut object.\n * @param customPut.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPut.parameters - Query parameters to apply to the current query.\n * @param customPut.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPut({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPut`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a user profile. 
The response includes a date and time when the user profile can safely be considered deleted.\n *\n * Required API Key ACLs:\n * - recommendation\n * @param deleteUserProfile - The deleteUserProfile object.\n * @param deleteUserProfile.userToken - Unique identifier representing a user for which to fetch the personalization profile.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteUserProfile({ userToken }, requestOptions) {\n if (!userToken) {\n throw new Error(\"Parameter `userToken` is required when calling `deleteUserProfile`.\");\n }\n const requestPath = \"/1/profiles/{userToken}\".replace(\"{userToken}\", encodeURIComponent(userToken));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the current personalization strategy.\n *\n * Required API Key ACLs:\n * - recommendation\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getPersonalizationStrategy(requestOptions) {\n const requestPath = \"/1/strategies/personalization\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a user profile and their affinities for different facets.\n *\n * Required API Key ACLs:\n * - recommendation\n * @param getUserTokenProfile - The getUserTokenProfile object.\n * @param getUserTokenProfile.userToken - Unique identifier representing a user for which to fetch the personalization profile.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getUserTokenProfile({ userToken }, requestOptions) {\n if (!userToken) {\n throw new Error(\"Parameter `userToken` is required when calling `getUserTokenProfile`.\");\n }\n const requestPath = \"/1/profiles/personalization/{userToken}\".replace(\n \"{userToken}\",\n encodeURIComponent(userToken)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Creates a new personalization strategy.\n *\n * Required API Key ACLs:\n * - recommendation\n * @param personalizationStrategyParams - The personalizationStrategyParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n setPersonalizationStrategy(personalizationStrategyParams, requestOptions) {\n if (!personalizationStrategyParams) {\n throw new Error(\n \"Parameter `personalizationStrategyParams` is required when calling `setPersonalizationStrategy`.\"\n );\n }\n if (!personalizationStrategyParams.eventsScoring) {\n throw new Error(\n \"Parameter `personalizationStrategyParams.eventsScoring` is required when calling `setPersonalizationStrategy`.\"\n );\n }\n if (!personalizationStrategyParams.facetsScoring) {\n throw new Error(\n \"Parameter `personalizationStrategyParams.facetsScoring` is required when calling `setPersonalizationStrategy`.\"\n );\n }\n if (!personalizationStrategyParams.personalizationImpact) {\n throw new Error(\n 
\"Parameter `personalizationStrategyParams.personalizationImpact` is required when calling `setPersonalizationStrategy`.\"\n );\n }\n const requestPath = \"/1/strategies/personalization\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: personalizationStrategyParams\n };\n return transporter.request(request, requestOptions);\n }\n };\n}\n\n// builds/browser.ts\nfunction personalizationClient(appId, apiKey, region, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n if (!region || region && (typeof region !== \"string\" || !REGIONS.includes(region))) {\n throw new Error(`\\`region\\` is required and must be one of the following: ${REGIONS.join(\", \")}`);\n }\n return createPersonalizationClient({\n appId,\n apiKey,\n region,\n timeouts: {\n connect: 1e3,\n read: 2e3,\n write: 3e4\n },\n logger: createNullLogger(),\n requester: createXhrRequester(),\n algoliaAgents: [{ segment: \"Browser\" }],\n authMode: \"WithinQueryParameters\",\n responsesCache: createMemoryCache(),\n requestsCache: createMemoryCache({ serializable: false }),\n hostsCache: createFallbackableCache({\n caches: [createBrowserLocalStorageCache({ key: `${apiClientVersion}-${appId}` }), createMemoryCache()]\n }),\n ...options\n });\n}\nexport {\n apiClientVersion,\n personalizationClient\n};\n//# sourceMappingURL=browser.js.map","function m(){function r(t){return new Promise(s=>{let e=new XMLHttpRequest;e.open(t.method,t.url,!0),Object.keys(t.headers).forEach(n=>e.setRequestHeader(n,t.headers[n]));let i=(n,a)=>setTimeout(()=>{e.abort(),s({status:0,content:a,isTimedOut:!0})},n),u=i(t.connectTimeout,\"Connection timeout\"),o;e.onreadystatechange=()=>{e.readyState>e.OPENED&&o===void 0&&(clearTimeout(u),o=i(t.responseTimeout,\"Socket timeout\"))},e.onerror=()=>{e.status===0&&(clearTimeout(u),clearTimeout(o),s({content:e.responseText||\"Network request failed\",status:e.status,isTimedOut:!1}))},e.onload=()=>{clearTimeout(u),clearTimeout(o),s({content:e.responseText,status:e.status,isTimedOut:!1})},e.send(t.data)})}return{send:r}}export{m as createXhrRequester};\n//# sourceMappingURL=requester.xhr.js.map","// src/cache/createBrowserLocalStorageCache.ts\nfunction createBrowserLocalStorageCache(options) {\n let storage;\n const namespaceKey = `algolia-client-js-${options.key}`;\n function getStorage() {\n if (storage === void 0) {\n storage = options.localStorage || window.localStorage;\n }\n return storage;\n }\n function getNamespace() {\n return JSON.parse(getStorage().getItem(namespaceKey) || \"{}\");\n }\n function setNamespace(namespace) {\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n }\n function removeOutdatedCacheItems() {\n const timeToLive = options.timeToLive ? 
options.timeToLive * 1e3 : null;\n const namespace = getNamespace();\n const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(\n Object.entries(namespace).filter(([, cacheItem]) => {\n return cacheItem.timestamp !== void 0;\n })\n );\n setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);\n if (!timeToLive) {\n return;\n }\n const filteredNamespaceWithoutExpiredItems = Object.fromEntries(\n Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {\n const currentTimestamp = (/* @__PURE__ */ new Date()).getTime();\n const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;\n return !isExpired;\n })\n );\n setNamespace(filteredNamespaceWithoutExpiredItems);\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return Promise.resolve().then(() => {\n removeOutdatedCacheItems();\n return getNamespace()[JSON.stringify(key)];\n }).then((value) => {\n return Promise.all([value ? value.value : defaultValue(), value !== void 0]);\n }).then(([value, exists]) => {\n return Promise.all([value, exists || events.miss(value)]);\n }).then(([value]) => value);\n },\n set(key, value) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n namespace[JSON.stringify(key)] = {\n timestamp: (/* @__PURE__ */ new Date()).getTime(),\n value\n };\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n return value;\n });\n },\n delete(key) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n delete namespace[JSON.stringify(key)];\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n });\n },\n clear() {\n return Promise.resolve().then(() => {\n getStorage().removeItem(namespaceKey);\n });\n }\n };\n}\n\n// src/cache/createNullCache.ts\nfunction createNullCache() {\n return {\n get(_key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const value = defaultValue();\n return value.then((result) => Promise.all([result, events.miss(result)])).then(([result]) => result);\n },\n set(_key, value) {\n return Promise.resolve(value);\n },\n delete(_key) {\n return Promise.resolve();\n },\n clear() {\n return Promise.resolve();\n }\n };\n}\n\n// src/cache/createFallbackableCache.ts\nfunction createFallbackableCache(options) {\n const caches = [...options.caches];\n const current = caches.shift();\n if (current === void 0) {\n return createNullCache();\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return current.get(key, defaultValue, events).catch(() => {\n return createFallbackableCache({ caches }).get(key, defaultValue, events);\n });\n },\n set(key, value) {\n return current.set(key, value).catch(() => {\n return createFallbackableCache({ caches }).set(key, value);\n });\n },\n delete(key) {\n return current.delete(key).catch(() => {\n return createFallbackableCache({ caches }).delete(key);\n });\n },\n clear() {\n return current.clear().catch(() => {\n return createFallbackableCache({ caches }).clear();\n });\n }\n };\n}\n\n// src/cache/createMemoryCache.ts\nfunction createMemoryCache(options = { serializable: true }) {\n let cache = {};\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const keyAsString = JSON.stringify(key);\n if (keyAsString in cache) {\n return Promise.resolve(options.serializable ? 
JSON.parse(cache[keyAsString]) : cache[keyAsString]);\n }\n const promise = defaultValue();\n return promise.then((value) => events.miss(value)).then(() => promise);\n },\n set(key, value) {\n cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;\n return Promise.resolve(value);\n },\n delete(key) {\n delete cache[JSON.stringify(key)];\n return Promise.resolve();\n },\n clear() {\n cache = {};\n return Promise.resolve();\n }\n };\n}\n\n// src/constants.ts\nvar DEFAULT_CONNECT_TIMEOUT_BROWSER = 1e3;\nvar DEFAULT_READ_TIMEOUT_BROWSER = 2e3;\nvar DEFAULT_WRITE_TIMEOUT_BROWSER = 3e4;\nvar DEFAULT_CONNECT_TIMEOUT_NODE = 2e3;\nvar DEFAULT_READ_TIMEOUT_NODE = 5e3;\nvar DEFAULT_WRITE_TIMEOUT_NODE = 3e4;\n\n// src/createAlgoliaAgent.ts\nfunction createAlgoliaAgent(version) {\n const algoliaAgent = {\n value: `Algolia for JavaScript (${version})`,\n add(options) {\n const addedAlgoliaAgent = `; ${options.segment}${options.version !== void 0 ? ` (${options.version})` : \"\"}`;\n if (algoliaAgent.value.indexOf(addedAlgoliaAgent) === -1) {\n algoliaAgent.value = `${algoliaAgent.value}${addedAlgoliaAgent}`;\n }\n return algoliaAgent;\n }\n };\n return algoliaAgent;\n}\n\n// src/createAuth.ts\nfunction createAuth(appId, apiKey, authMode = \"WithinHeaders\") {\n const credentials = {\n \"x-algolia-api-key\": apiKey,\n \"x-algolia-application-id\": appId\n };\n return {\n headers() {\n return authMode === \"WithinHeaders\" ? credentials : {};\n },\n queryParameters() {\n return authMode === \"WithinQueryParameters\" ? credentials : {};\n }\n };\n}\n\n// src/createIterablePromise.ts\nfunction createIterablePromise({\n func,\n validate,\n aggregator,\n error,\n timeout = () => 0\n}) {\n const retry = (previousResponse) => {\n return new Promise((resolve, reject) => {\n func(previousResponse).then(async (response) => {\n if (aggregator) {\n await aggregator(response);\n }\n if (await validate(response)) {\n return resolve(response);\n }\n if (error && await error.validate(response)) {\n return reject(new Error(await error.message(response)));\n }\n return setTimeout(\n () => {\n retry(response).then(resolve).catch(reject);\n },\n await timeout()\n );\n }).catch((err) => {\n reject(err);\n });\n });\n };\n return retry();\n}\n\n// src/getAlgoliaAgent.ts\nfunction getAlgoliaAgent({ algoliaAgents, client, version }) {\n const defaultAlgoliaAgent = createAlgoliaAgent(version).add({\n segment: client,\n version\n });\n algoliaAgents.forEach((algoliaAgent) => defaultAlgoliaAgent.add(algoliaAgent));\n return defaultAlgoliaAgent;\n}\n\n// src/logger/createNullLogger.ts\nfunction createNullLogger() {\n return {\n debug(_message, _args) {\n return Promise.resolve();\n },\n info(_message, _args) {\n return Promise.resolve();\n },\n error(_message, _args) {\n return Promise.resolve();\n }\n };\n}\n\n// src/transporter/createStatefulHost.ts\nvar EXPIRATION_DELAY = 2 * 60 * 1e3;\nfunction createStatefulHost(host, status = \"up\") {\n const lastUpdate = Date.now();\n function isUp() {\n return status === \"up\" || Date.now() - lastUpdate > EXPIRATION_DELAY;\n }\n function isTimedOut() {\n return status === \"timed out\" && Date.now() - lastUpdate <= EXPIRATION_DELAY;\n }\n return { ...host, status, lastUpdate, isUp, isTimedOut };\n}\n\n// src/transporter/errors.ts\nvar AlgoliaError = class extends Error {\n name = \"AlgoliaError\";\n constructor(message, name) {\n super(message);\n if (name) {\n this.name = name;\n }\n }\n};\nvar ErrorWithStackTrace = class extends AlgoliaError {\n stackTrace;\n 
constructor(message, stackTrace, name) {\n super(message, name);\n this.stackTrace = stackTrace;\n }\n};\nvar RetryError = class extends ErrorWithStackTrace {\n constructor(stackTrace) {\n super(\n \"Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support.\",\n stackTrace,\n \"RetryError\"\n );\n }\n};\nvar ApiError = class extends ErrorWithStackTrace {\n status;\n constructor(message, status, stackTrace, name = \"ApiError\") {\n super(message, stackTrace, name);\n this.status = status;\n }\n};\nvar DeserializationError = class extends AlgoliaError {\n response;\n constructor(message, response) {\n super(message, \"DeserializationError\");\n this.response = response;\n }\n};\nvar DetailedApiError = class extends ApiError {\n error;\n constructor(message, status, error, stackTrace) {\n super(message, status, stackTrace, \"DetailedApiError\");\n this.error = error;\n }\n};\n\n// src/transporter/helpers.ts\nfunction shuffle(array) {\n const shuffledArray = array;\n for (let c = array.length - 1; c > 0; c--) {\n const b = Math.floor(Math.random() * (c + 1));\n const a = array[c];\n shuffledArray[c] = array[b];\n shuffledArray[b] = a;\n }\n return shuffledArray;\n}\nfunction serializeUrl(host, path, queryParameters) {\n const queryParametersAsString = serializeQueryParameters(queryParameters);\n let url = `${host.protocol}://${host.url}${host.port ? `:${host.port}` : \"\"}/${path.charAt(0) === \"/\" ? path.substring(1) : path}`;\n if (queryParametersAsString.length) {\n url += `?${queryParametersAsString}`;\n }\n return url;\n}\nfunction serializeQueryParameters(parameters) {\n return Object.keys(parameters).filter((key) => parameters[key] !== void 0).sort().map(\n (key) => `${key}=${encodeURIComponent(\n Object.prototype.toString.call(parameters[key]) === \"[object Array]\" ? parameters[key].join(\",\") : parameters[key]\n ).replace(/\\+/g, \"%20\")}`\n ).join(\"&\");\n}\nfunction serializeData(request, requestOptions) {\n if (request.method === \"GET\" || request.data === void 0 && requestOptions.data === void 0) {\n return void 0;\n }\n const data = Array.isArray(request.data) ? 
request.data : { ...request.data, ...requestOptions.data };\n return JSON.stringify(data);\n}\nfunction serializeHeaders(baseHeaders, requestHeaders, requestOptionsHeaders) {\n const headers = {\n Accept: \"application/json\",\n ...baseHeaders,\n ...requestHeaders,\n ...requestOptionsHeaders\n };\n const serializedHeaders = {};\n Object.keys(headers).forEach((header) => {\n const value = headers[header];\n serializedHeaders[header.toLowerCase()] = value;\n });\n return serializedHeaders;\n}\nfunction deserializeSuccess(response) {\n try {\n return JSON.parse(response.content);\n } catch (e) {\n throw new DeserializationError(e.message, response);\n }\n}\nfunction deserializeFailure({ content, status }, stackFrame) {\n try {\n const parsed = JSON.parse(content);\n if (\"error\" in parsed) {\n return new DetailedApiError(parsed.message, status, parsed.error, stackFrame);\n }\n return new ApiError(parsed.message, status, stackFrame);\n } catch {\n }\n return new ApiError(content, status, stackFrame);\n}\n\n// src/transporter/responses.ts\nfunction isNetworkError({ isTimedOut, status }) {\n return !isTimedOut && ~~status === 0;\n}\nfunction isRetryable({ isTimedOut, status }) {\n return isTimedOut || isNetworkError({ isTimedOut, status }) || ~~(status / 100) !== 2 && ~~(status / 100) !== 4;\n}\nfunction isSuccess({ status }) {\n return ~~(status / 100) === 2;\n}\n\n// src/transporter/stackTrace.ts\nfunction stackTraceWithoutCredentials(stackTrace) {\n return stackTrace.map((stackFrame) => stackFrameWithoutCredentials(stackFrame));\n}\nfunction stackFrameWithoutCredentials(stackFrame) {\n const modifiedHeaders = stackFrame.request.headers[\"x-algolia-api-key\"] ? { \"x-algolia-api-key\": \"*****\" } : {};\n return {\n ...stackFrame,\n request: {\n ...stackFrame.request,\n headers: {\n ...stackFrame.request.headers,\n ...modifiedHeaders\n }\n }\n };\n}\n\n// src/transporter/createTransporter.ts\nfunction createTransporter({\n hosts,\n hostsCache,\n baseHeaders,\n logger,\n baseQueryParameters,\n algoliaAgent,\n timeouts,\n requester,\n requestsCache,\n responsesCache\n}) {\n async function createRetryableOptions(compatibleHosts) {\n const statefulHosts = await Promise.all(\n compatibleHosts.map((compatibleHost) => {\n return hostsCache.get(compatibleHost, () => {\n return Promise.resolve(createStatefulHost(compatibleHost));\n });\n })\n );\n const hostsUp = statefulHosts.filter((host) => host.isUp());\n const hostsTimedOut = statefulHosts.filter((host) => host.isTimedOut());\n const hostsAvailable = [...hostsUp, ...hostsTimedOut];\n const compatibleHostsAvailable = hostsAvailable.length > 0 ? hostsAvailable : compatibleHosts;\n return {\n hosts: compatibleHostsAvailable,\n getTimeout(timeoutsCount, baseTimeout) {\n const timeoutMultiplier = hostsTimedOut.length === 0 && timeoutsCount === 0 ? 1 : hostsTimedOut.length + 3 + timeoutsCount;\n return timeoutMultiplier * baseTimeout;\n }\n };\n }\n async function retryableRequest(request, requestOptions, isRead = true) {\n const stackTrace = [];\n const data = serializeData(request, requestOptions);\n const headers = serializeHeaders(baseHeaders, request.headers, requestOptions.headers);\n const dataQueryParameters = request.method === \"GET\" ? 
{\n ...request.data,\n ...requestOptions.data\n } : {};\n const queryParameters = {\n ...baseQueryParameters,\n ...request.queryParameters,\n ...dataQueryParameters\n };\n if (algoliaAgent.value) {\n queryParameters[\"x-algolia-agent\"] = algoliaAgent.value;\n }\n if (requestOptions && requestOptions.queryParameters) {\n for (const key of Object.keys(requestOptions.queryParameters)) {\n if (!requestOptions.queryParameters[key] || Object.prototype.toString.call(requestOptions.queryParameters[key]) === \"[object Object]\") {\n queryParameters[key] = requestOptions.queryParameters[key];\n } else {\n queryParameters[key] = requestOptions.queryParameters[key].toString();\n }\n }\n }\n let timeoutsCount = 0;\n const retry = async (retryableHosts, getTimeout) => {\n const host = retryableHosts.pop();\n if (host === void 0) {\n throw new RetryError(stackTraceWithoutCredentials(stackTrace));\n }\n const timeout = { ...timeouts, ...requestOptions.timeouts };\n const payload = {\n data,\n headers,\n method: request.method,\n url: serializeUrl(host, request.path, queryParameters),\n connectTimeout: getTimeout(timeoutsCount, timeout.connect),\n responseTimeout: getTimeout(timeoutsCount, isRead ? timeout.read : timeout.write)\n };\n const pushToStackTrace = (response2) => {\n const stackFrame = {\n request: payload,\n response: response2,\n host,\n triesLeft: retryableHosts.length\n };\n stackTrace.push(stackFrame);\n return stackFrame;\n };\n const response = await requester.send(payload);\n if (isRetryable(response)) {\n const stackFrame = pushToStackTrace(response);\n if (response.isTimedOut) {\n timeoutsCount++;\n }\n logger.info(\"Retryable failure\", stackFrameWithoutCredentials(stackFrame));\n await hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? \"timed out\" : \"down\"));\n return retry(retryableHosts, getTimeout);\n }\n if (isSuccess(response)) {\n return deserializeSuccess(response);\n }\n pushToStackTrace(response);\n throw deserializeFailure(response, stackTrace);\n };\n const compatibleHosts = hosts.filter(\n (host) => host.accept === \"readWrite\" || (isRead ? 
host.accept === \"read\" : host.accept === \"write\")\n );\n const options = await createRetryableOptions(compatibleHosts);\n return retry([...options.hosts].reverse(), options.getTimeout);\n }\n function createRequest(request, requestOptions = {}) {\n const isRead = request.useReadTransporter || request.method === \"GET\";\n if (!isRead) {\n return retryableRequest(request, requestOptions, isRead);\n }\n const createRetryableRequest = () => {\n return retryableRequest(request, requestOptions);\n };\n const cacheable = requestOptions.cacheable || request.cacheable;\n if (cacheable !== true) {\n return createRetryableRequest();\n }\n const key = {\n request,\n requestOptions,\n transporter: {\n queryParameters: baseQueryParameters,\n headers: baseHeaders\n }\n };\n return responsesCache.get(\n key,\n () => {\n return requestsCache.get(\n key,\n () => (\n /**\n * Finally, if there is no request in progress with the same key,\n * this `createRetryableRequest()` will actually trigger the\n * retryable request.\n */\n requestsCache.set(key, createRetryableRequest()).then(\n (response) => Promise.all([requestsCache.delete(key), response]),\n (err) => Promise.all([requestsCache.delete(key), Promise.reject(err)])\n ).then(([_, response]) => response)\n )\n );\n },\n {\n /**\n * Of course, once we get this response back from the server, we\n * tell response cache to actually store the received response\n * to be used later.\n */\n miss: (response) => responsesCache.set(key, response)\n }\n );\n }\n return {\n hostsCache,\n requester,\n timeouts,\n logger,\n algoliaAgent,\n baseHeaders,\n baseQueryParameters,\n hosts,\n request: createRequest,\n requestsCache,\n responsesCache\n };\n}\n\n// src/types/logger.ts\nvar LogLevelEnum = {\n Debug: 1,\n Info: 2,\n Error: 3\n};\nexport {\n AlgoliaError,\n ApiError,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_CONNECT_TIMEOUT_NODE,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_NODE,\n DEFAULT_WRITE_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_NODE,\n DeserializationError,\n DetailedApiError,\n ErrorWithStackTrace,\n LogLevelEnum,\n RetryError,\n createAlgoliaAgent,\n createAuth,\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createIterablePromise,\n createMemoryCache,\n createNullCache,\n createNullLogger,\n createStatefulHost,\n createTransporter,\n deserializeFailure,\n deserializeSuccess,\n getAlgoliaAgent,\n isNetworkError,\n isRetryable,\n isSuccess,\n serializeData,\n serializeHeaders,\n serializeQueryParameters,\n serializeUrl,\n shuffle,\n stackFrameWithoutCredentials,\n stackTraceWithoutCredentials\n};\n//# sourceMappingURL=common.js.map","// builds/browser.ts\nimport { createXhrRequester } from \"@algolia/requester-browser-xhr\";\nimport {\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createMemoryCache,\n createNullLogger\n} from \"@algolia/client-common\";\n\n// src/querySuggestionsClient.ts\nimport { createAuth, createTransporter, getAlgoliaAgent } from \"@algolia/client-common\";\nvar apiClientVersion = \"5.19.0\";\nvar REGIONS = [\"eu\", \"us\"];\nfunction getDefaultHosts(region) {\n const url = \"query-suggestions.{region}.algolia.com\".replace(\"{region}\", region);\n return [{ url, accept: \"readWrite\", protocol: \"https\" }];\n}\nfunction createQuerySuggestionsClient({\n appId: appIdOption,\n apiKey: apiKeyOption,\n authMode,\n algoliaAgents,\n region: regionOption,\n ...options\n}) {\n const auth = createAuth(appIdOption, apiKeyOption, authMode);\n const transporter = createTransporter({\n hosts: 
getDefaultHosts(regionOption),\n ...options,\n algoliaAgent: getAlgoliaAgent({\n algoliaAgents,\n client: \"QuerySuggestions\",\n version: apiClientVersion\n }),\n baseHeaders: {\n \"content-type\": \"text/plain\",\n ...auth.headers(),\n ...options.baseHeaders\n },\n baseQueryParameters: {\n ...auth.queryParameters(),\n ...options.baseQueryParameters\n }\n });\n return {\n transporter,\n /**\n * The `appId` currently in use.\n */\n appId: appIdOption,\n /**\n * The `apiKey` currently in use.\n */\n apiKey: apiKeyOption,\n /**\n * Clears the cache of the transporter for the `requestsCache` and `responsesCache` properties.\n */\n clearCache() {\n return Promise.all([transporter.requestsCache.clear(), transporter.responsesCache.clear()]).then(() => void 0);\n },\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return transporter.algoliaAgent.value;\n },\n /**\n * Adds a `segment` to the `x-algolia-agent` sent with every requests.\n *\n * @param segment - The algolia agent (user-agent) segment to add.\n * @param version - The version of the agent.\n */\n addAlgoliaAgent(segment, version) {\n transporter.algoliaAgent.add({ segment, version });\n },\n /**\n * Helper method to switch the API key used to authenticate the requests.\n *\n * @param params - Method params.\n * @param params.apiKey - The new API Key to use.\n */\n setClientApiKey({ apiKey }) {\n if (!authMode || authMode === \"WithinHeaders\") {\n transporter.baseHeaders[\"x-algolia-api-key\"] = apiKey;\n } else {\n transporter.baseQueryParameters[\"x-algolia-api-key\"] = apiKey;\n }\n },\n /**\n * Creates a new Query Suggestions configuration. You can have up to 100 configurations per Algolia application.\n *\n * Required API Key ACLs:\n * - editSettings\n * @param configurationWithIndex - The configurationWithIndex object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n createConfig(configurationWithIndex, requestOptions) {\n if (!configurationWithIndex) {\n throw new Error(\"Parameter `configurationWithIndex` is required when calling `createConfig`.\");\n }\n const requestPath = \"/1/configs\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: configurationWithIndex\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customDelete - The customDelete object.\n * @param customDelete.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customDelete.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customDelete({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customDelete`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? 
parameters : {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customGet - The customGet object.\n * @param customGet.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customGet.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customGet({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customGet`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPost - The customPost object.\n * @param customPost.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPost.parameters - Query parameters to apply to the current query.\n * @param customPost.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPost({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPost`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPut - The customPut object.\n * @param customPut.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPut.parameters - Query parameters to apply to the current query.\n * @param customPut.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPut({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPut`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a Query Suggestions configuration. Deleting only removes the configuration and stops updates to the Query Suggestions index. 
To delete the Query Suggestions index itself, use the Search API and the `Delete an index` operation.\n *\n * Required API Key ACLs:\n * - editSettings\n * @param deleteConfig - The deleteConfig object.\n * @param deleteConfig.indexName - Query Suggestions index name.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteConfig({ indexName }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `deleteConfig`.\");\n }\n const requestPath = \"/1/configs/{indexName}\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves all Query Suggestions configurations of your Algolia application.\n *\n * Required API Key ACLs:\n * - settings\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getAllConfigs(requestOptions) {\n const requestPath = \"/1/configs\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a single Query Suggestions configuration by its index name.\n *\n * Required API Key ACLs:\n * - settings\n * @param getConfig - The getConfig object.\n * @param getConfig.indexName - Query Suggestions index name.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getConfig({ indexName }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `getConfig`.\");\n }\n const requestPath = \"/1/configs/{indexName}\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Reports the status of a Query Suggestions index.\n *\n * Required API Key ACLs:\n * - settings\n * @param getConfigStatus - The getConfigStatus object.\n * @param getConfigStatus.indexName - Query Suggestions index name.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getConfigStatus({ indexName }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `getConfigStatus`.\");\n }\n const requestPath = \"/1/configs/{indexName}/status\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the logs for a single Query Suggestions index.\n *\n * Required API Key ACLs:\n * - settings\n * @param getLogFile - The getLogFile object.\n * @param getLogFile.indexName - Query Suggestions index name.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getLogFile({ indexName }, requestOptions) {\n if (!indexName) {\n 
throw new Error(\"Parameter `indexName` is required when calling `getLogFile`.\");\n }\n const requestPath = \"/1/logs/{indexName}\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Updates a QuerySuggestions configuration.\n *\n * Required API Key ACLs:\n * - editSettings\n * @param updateConfig - The updateConfig object.\n * @param updateConfig.indexName - Query Suggestions index name.\n * @param updateConfig.configuration - The configuration object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n updateConfig({ indexName, configuration }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `updateConfig`.\");\n }\n if (!configuration) {\n throw new Error(\"Parameter `configuration` is required when calling `updateConfig`.\");\n }\n if (!configuration.sourceIndices) {\n throw new Error(\"Parameter `configuration.sourceIndices` is required when calling `updateConfig`.\");\n }\n const requestPath = \"/1/configs/{indexName}\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: configuration\n };\n return transporter.request(request, requestOptions);\n }\n };\n}\n\n// builds/browser.ts\nfunction querySuggestionsClient(appId, apiKey, region, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n if (!region || region && (typeof region !== \"string\" || !REGIONS.includes(region))) {\n throw new Error(`\\`region\\` is required and must be one of the following: ${REGIONS.join(\", \")}`);\n }\n return createQuerySuggestionsClient({\n appId,\n apiKey,\n region,\n timeouts: {\n connect: 1e3,\n read: 2e3,\n write: 3e4\n },\n logger: createNullLogger(),\n requester: createXhrRequester(),\n algoliaAgents: [{ segment: \"Browser\" }],\n authMode: \"WithinQueryParameters\",\n responsesCache: createMemoryCache(),\n requestsCache: createMemoryCache({ serializable: false }),\n hostsCache: createFallbackableCache({\n caches: [createBrowserLocalStorageCache({ key: `${apiClientVersion}-${appId}` }), createMemoryCache()]\n }),\n ...options\n });\n}\nexport {\n apiClientVersion,\n querySuggestionsClient\n};\n//# sourceMappingURL=browser.js.map","// src/cache/createBrowserLocalStorageCache.ts\nfunction createBrowserLocalStorageCache(options) {\n let storage;\n const namespaceKey = `algolia-client-js-${options.key}`;\n function getStorage() {\n if (storage === void 0) {\n storage = options.localStorage || window.localStorage;\n }\n return storage;\n }\n function getNamespace() {\n return JSON.parse(getStorage().getItem(namespaceKey) || \"{}\");\n }\n function setNamespace(namespace) {\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n }\n function removeOutdatedCacheItems() {\n const timeToLive = options.timeToLive ? 
options.timeToLive * 1e3 : null;\n const namespace = getNamespace();\n const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(\n Object.entries(namespace).filter(([, cacheItem]) => {\n return cacheItem.timestamp !== void 0;\n })\n );\n setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);\n if (!timeToLive) {\n return;\n }\n const filteredNamespaceWithoutExpiredItems = Object.fromEntries(\n Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {\n const currentTimestamp = (/* @__PURE__ */ new Date()).getTime();\n const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;\n return !isExpired;\n })\n );\n setNamespace(filteredNamespaceWithoutExpiredItems);\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return Promise.resolve().then(() => {\n removeOutdatedCacheItems();\n return getNamespace()[JSON.stringify(key)];\n }).then((value) => {\n return Promise.all([value ? value.value : defaultValue(), value !== void 0]);\n }).then(([value, exists]) => {\n return Promise.all([value, exists || events.miss(value)]);\n }).then(([value]) => value);\n },\n set(key, value) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n namespace[JSON.stringify(key)] = {\n timestamp: (/* @__PURE__ */ new Date()).getTime(),\n value\n };\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n return value;\n });\n },\n delete(key) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n delete namespace[JSON.stringify(key)];\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n });\n },\n clear() {\n return Promise.resolve().then(() => {\n getStorage().removeItem(namespaceKey);\n });\n }\n };\n}\n\n// src/cache/createNullCache.ts\nfunction createNullCache() {\n return {\n get(_key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const value = defaultValue();\n return value.then((result) => Promise.all([result, events.miss(result)])).then(([result]) => result);\n },\n set(_key, value) {\n return Promise.resolve(value);\n },\n delete(_key) {\n return Promise.resolve();\n },\n clear() {\n return Promise.resolve();\n }\n };\n}\n\n// src/cache/createFallbackableCache.ts\nfunction createFallbackableCache(options) {\n const caches = [...options.caches];\n const current = caches.shift();\n if (current === void 0) {\n return createNullCache();\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return current.get(key, defaultValue, events).catch(() => {\n return createFallbackableCache({ caches }).get(key, defaultValue, events);\n });\n },\n set(key, value) {\n return current.set(key, value).catch(() => {\n return createFallbackableCache({ caches }).set(key, value);\n });\n },\n delete(key) {\n return current.delete(key).catch(() => {\n return createFallbackableCache({ caches }).delete(key);\n });\n },\n clear() {\n return current.clear().catch(() => {\n return createFallbackableCache({ caches }).clear();\n });\n }\n };\n}\n\n// src/cache/createMemoryCache.ts\nfunction createMemoryCache(options = { serializable: true }) {\n let cache = {};\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const keyAsString = JSON.stringify(key);\n if (keyAsString in cache) {\n return Promise.resolve(options.serializable ? 
JSON.parse(cache[keyAsString]) : cache[keyAsString]);\n }\n const promise = defaultValue();\n return promise.then((value) => events.miss(value)).then(() => promise);\n },\n set(key, value) {\n cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;\n return Promise.resolve(value);\n },\n delete(key) {\n delete cache[JSON.stringify(key)];\n return Promise.resolve();\n },\n clear() {\n cache = {};\n return Promise.resolve();\n }\n };\n}\n\n// src/constants.ts\nvar DEFAULT_CONNECT_TIMEOUT_BROWSER = 1e3;\nvar DEFAULT_READ_TIMEOUT_BROWSER = 2e3;\nvar DEFAULT_WRITE_TIMEOUT_BROWSER = 3e4;\nvar DEFAULT_CONNECT_TIMEOUT_NODE = 2e3;\nvar DEFAULT_READ_TIMEOUT_NODE = 5e3;\nvar DEFAULT_WRITE_TIMEOUT_NODE = 3e4;\n\n// src/createAlgoliaAgent.ts\nfunction createAlgoliaAgent(version) {\n const algoliaAgent = {\n value: `Algolia for JavaScript (${version})`,\n add(options) {\n const addedAlgoliaAgent = `; ${options.segment}${options.version !== void 0 ? ` (${options.version})` : \"\"}`;\n if (algoliaAgent.value.indexOf(addedAlgoliaAgent) === -1) {\n algoliaAgent.value = `${algoliaAgent.value}${addedAlgoliaAgent}`;\n }\n return algoliaAgent;\n }\n };\n return algoliaAgent;\n}\n\n// src/createAuth.ts\nfunction createAuth(appId, apiKey, authMode = \"WithinHeaders\") {\n const credentials = {\n \"x-algolia-api-key\": apiKey,\n \"x-algolia-application-id\": appId\n };\n return {\n headers() {\n return authMode === \"WithinHeaders\" ? credentials : {};\n },\n queryParameters() {\n return authMode === \"WithinQueryParameters\" ? credentials : {};\n }\n };\n}\n\n// src/createIterablePromise.ts\nfunction createIterablePromise({\n func,\n validate,\n aggregator,\n error,\n timeout = () => 0\n}) {\n const retry = (previousResponse) => {\n return new Promise((resolve, reject) => {\n func(previousResponse).then(async (response) => {\n if (aggregator) {\n await aggregator(response);\n }\n if (await validate(response)) {\n return resolve(response);\n }\n if (error && await error.validate(response)) {\n return reject(new Error(await error.message(response)));\n }\n return setTimeout(\n () => {\n retry(response).then(resolve).catch(reject);\n },\n await timeout()\n );\n }).catch((err) => {\n reject(err);\n });\n });\n };\n return retry();\n}\n\n// src/getAlgoliaAgent.ts\nfunction getAlgoliaAgent({ algoliaAgents, client, version }) {\n const defaultAlgoliaAgent = createAlgoliaAgent(version).add({\n segment: client,\n version\n });\n algoliaAgents.forEach((algoliaAgent) => defaultAlgoliaAgent.add(algoliaAgent));\n return defaultAlgoliaAgent;\n}\n\n// src/logger/createNullLogger.ts\nfunction createNullLogger() {\n return {\n debug(_message, _args) {\n return Promise.resolve();\n },\n info(_message, _args) {\n return Promise.resolve();\n },\n error(_message, _args) {\n return Promise.resolve();\n }\n };\n}\n\n// src/transporter/createStatefulHost.ts\nvar EXPIRATION_DELAY = 2 * 60 * 1e3;\nfunction createStatefulHost(host, status = \"up\") {\n const lastUpdate = Date.now();\n function isUp() {\n return status === \"up\" || Date.now() - lastUpdate > EXPIRATION_DELAY;\n }\n function isTimedOut() {\n return status === \"timed out\" && Date.now() - lastUpdate <= EXPIRATION_DELAY;\n }\n return { ...host, status, lastUpdate, isUp, isTimedOut };\n}\n\n// src/transporter/errors.ts\nvar AlgoliaError = class extends Error {\n name = \"AlgoliaError\";\n constructor(message, name) {\n super(message);\n if (name) {\n this.name = name;\n }\n }\n};\nvar ErrorWithStackTrace = class extends AlgoliaError {\n stackTrace;\n 
constructor(message, stackTrace, name) {\n super(message, name);\n this.stackTrace = stackTrace;\n }\n};\nvar RetryError = class extends ErrorWithStackTrace {\n constructor(stackTrace) {\n super(\n \"Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support.\",\n stackTrace,\n \"RetryError\"\n );\n }\n};\nvar ApiError = class extends ErrorWithStackTrace {\n status;\n constructor(message, status, stackTrace, name = \"ApiError\") {\n super(message, stackTrace, name);\n this.status = status;\n }\n};\nvar DeserializationError = class extends AlgoliaError {\n response;\n constructor(message, response) {\n super(message, \"DeserializationError\");\n this.response = response;\n }\n};\nvar DetailedApiError = class extends ApiError {\n error;\n constructor(message, status, error, stackTrace) {\n super(message, status, stackTrace, \"DetailedApiError\");\n this.error = error;\n }\n};\n\n// src/transporter/helpers.ts\nfunction shuffle(array) {\n const shuffledArray = array;\n for (let c = array.length - 1; c > 0; c--) {\n const b = Math.floor(Math.random() * (c + 1));\n const a = array[c];\n shuffledArray[c] = array[b];\n shuffledArray[b] = a;\n }\n return shuffledArray;\n}\nfunction serializeUrl(host, path, queryParameters) {\n const queryParametersAsString = serializeQueryParameters(queryParameters);\n let url = `${host.protocol}://${host.url}${host.port ? `:${host.port}` : \"\"}/${path.charAt(0) === \"/\" ? path.substring(1) : path}`;\n if (queryParametersAsString.length) {\n url += `?${queryParametersAsString}`;\n }\n return url;\n}\nfunction serializeQueryParameters(parameters) {\n return Object.keys(parameters).filter((key) => parameters[key] !== void 0).sort().map(\n (key) => `${key}=${encodeURIComponent(\n Object.prototype.toString.call(parameters[key]) === \"[object Array]\" ? parameters[key].join(\",\") : parameters[key]\n ).replace(/\\+/g, \"%20\")}`\n ).join(\"&\");\n}\nfunction serializeData(request, requestOptions) {\n if (request.method === \"GET\" || request.data === void 0 && requestOptions.data === void 0) {\n return void 0;\n }\n const data = Array.isArray(request.data) ? 
request.data : { ...request.data, ...requestOptions.data };\n return JSON.stringify(data);\n}\nfunction serializeHeaders(baseHeaders, requestHeaders, requestOptionsHeaders) {\n const headers = {\n Accept: \"application/json\",\n ...baseHeaders,\n ...requestHeaders,\n ...requestOptionsHeaders\n };\n const serializedHeaders = {};\n Object.keys(headers).forEach((header) => {\n const value = headers[header];\n serializedHeaders[header.toLowerCase()] = value;\n });\n return serializedHeaders;\n}\nfunction deserializeSuccess(response) {\n try {\n return JSON.parse(response.content);\n } catch (e) {\n throw new DeserializationError(e.message, response);\n }\n}\nfunction deserializeFailure({ content, status }, stackFrame) {\n try {\n const parsed = JSON.parse(content);\n if (\"error\" in parsed) {\n return new DetailedApiError(parsed.message, status, parsed.error, stackFrame);\n }\n return new ApiError(parsed.message, status, stackFrame);\n } catch {\n }\n return new ApiError(content, status, stackFrame);\n}\n\n// src/transporter/responses.ts\nfunction isNetworkError({ isTimedOut, status }) {\n return !isTimedOut && ~~status === 0;\n}\nfunction isRetryable({ isTimedOut, status }) {\n return isTimedOut || isNetworkError({ isTimedOut, status }) || ~~(status / 100) !== 2 && ~~(status / 100) !== 4;\n}\nfunction isSuccess({ status }) {\n return ~~(status / 100) === 2;\n}\n\n// src/transporter/stackTrace.ts\nfunction stackTraceWithoutCredentials(stackTrace) {\n return stackTrace.map((stackFrame) => stackFrameWithoutCredentials(stackFrame));\n}\nfunction stackFrameWithoutCredentials(stackFrame) {\n const modifiedHeaders = stackFrame.request.headers[\"x-algolia-api-key\"] ? { \"x-algolia-api-key\": \"*****\" } : {};\n return {\n ...stackFrame,\n request: {\n ...stackFrame.request,\n headers: {\n ...stackFrame.request.headers,\n ...modifiedHeaders\n }\n }\n };\n}\n\n// src/transporter/createTransporter.ts\nfunction createTransporter({\n hosts,\n hostsCache,\n baseHeaders,\n logger,\n baseQueryParameters,\n algoliaAgent,\n timeouts,\n requester,\n requestsCache,\n responsesCache\n}) {\n async function createRetryableOptions(compatibleHosts) {\n const statefulHosts = await Promise.all(\n compatibleHosts.map((compatibleHost) => {\n return hostsCache.get(compatibleHost, () => {\n return Promise.resolve(createStatefulHost(compatibleHost));\n });\n })\n );\n const hostsUp = statefulHosts.filter((host) => host.isUp());\n const hostsTimedOut = statefulHosts.filter((host) => host.isTimedOut());\n const hostsAvailable = [...hostsUp, ...hostsTimedOut];\n const compatibleHostsAvailable = hostsAvailable.length > 0 ? hostsAvailable : compatibleHosts;\n return {\n hosts: compatibleHostsAvailable,\n getTimeout(timeoutsCount, baseTimeout) {\n const timeoutMultiplier = hostsTimedOut.length === 0 && timeoutsCount === 0 ? 1 : hostsTimedOut.length + 3 + timeoutsCount;\n return timeoutMultiplier * baseTimeout;\n }\n };\n }\n async function retryableRequest(request, requestOptions, isRead = true) {\n const stackTrace = [];\n const data = serializeData(request, requestOptions);\n const headers = serializeHeaders(baseHeaders, request.headers, requestOptions.headers);\n const dataQueryParameters = request.method === \"GET\" ? 
{\n ...request.data,\n ...requestOptions.data\n } : {};\n const queryParameters = {\n ...baseQueryParameters,\n ...request.queryParameters,\n ...dataQueryParameters\n };\n if (algoliaAgent.value) {\n queryParameters[\"x-algolia-agent\"] = algoliaAgent.value;\n }\n if (requestOptions && requestOptions.queryParameters) {\n for (const key of Object.keys(requestOptions.queryParameters)) {\n if (!requestOptions.queryParameters[key] || Object.prototype.toString.call(requestOptions.queryParameters[key]) === \"[object Object]\") {\n queryParameters[key] = requestOptions.queryParameters[key];\n } else {\n queryParameters[key] = requestOptions.queryParameters[key].toString();\n }\n }\n }\n let timeoutsCount = 0;\n const retry = async (retryableHosts, getTimeout) => {\n const host = retryableHosts.pop();\n if (host === void 0) {\n throw new RetryError(stackTraceWithoutCredentials(stackTrace));\n }\n const timeout = { ...timeouts, ...requestOptions.timeouts };\n const payload = {\n data,\n headers,\n method: request.method,\n url: serializeUrl(host, request.path, queryParameters),\n connectTimeout: getTimeout(timeoutsCount, timeout.connect),\n responseTimeout: getTimeout(timeoutsCount, isRead ? timeout.read : timeout.write)\n };\n const pushToStackTrace = (response2) => {\n const stackFrame = {\n request: payload,\n response: response2,\n host,\n triesLeft: retryableHosts.length\n };\n stackTrace.push(stackFrame);\n return stackFrame;\n };\n const response = await requester.send(payload);\n if (isRetryable(response)) {\n const stackFrame = pushToStackTrace(response);\n if (response.isTimedOut) {\n timeoutsCount++;\n }\n logger.info(\"Retryable failure\", stackFrameWithoutCredentials(stackFrame));\n await hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? \"timed out\" : \"down\"));\n return retry(retryableHosts, getTimeout);\n }\n if (isSuccess(response)) {\n return deserializeSuccess(response);\n }\n pushToStackTrace(response);\n throw deserializeFailure(response, stackTrace);\n };\n const compatibleHosts = hosts.filter(\n (host) => host.accept === \"readWrite\" || (isRead ? 
host.accept === \"read\" : host.accept === \"write\")\n );\n const options = await createRetryableOptions(compatibleHosts);\n return retry([...options.hosts].reverse(), options.getTimeout);\n }\n function createRequest(request, requestOptions = {}) {\n const isRead = request.useReadTransporter || request.method === \"GET\";\n if (!isRead) {\n return retryableRequest(request, requestOptions, isRead);\n }\n const createRetryableRequest = () => {\n return retryableRequest(request, requestOptions);\n };\n const cacheable = requestOptions.cacheable || request.cacheable;\n if (cacheable !== true) {\n return createRetryableRequest();\n }\n const key = {\n request,\n requestOptions,\n transporter: {\n queryParameters: baseQueryParameters,\n headers: baseHeaders\n }\n };\n return responsesCache.get(\n key,\n () => {\n return requestsCache.get(\n key,\n () => (\n /**\n * Finally, if there is no request in progress with the same key,\n * this `createRetryableRequest()` will actually trigger the\n * retryable request.\n */\n requestsCache.set(key, createRetryableRequest()).then(\n (response) => Promise.all([requestsCache.delete(key), response]),\n (err) => Promise.all([requestsCache.delete(key), Promise.reject(err)])\n ).then(([_, response]) => response)\n )\n );\n },\n {\n /**\n * Of course, once we get this response back from the server, we\n * tell response cache to actually store the received response\n * to be used later.\n */\n miss: (response) => responsesCache.set(key, response)\n }\n );\n }\n return {\n hostsCache,\n requester,\n timeouts,\n logger,\n algoliaAgent,\n baseHeaders,\n baseQueryParameters,\n hosts,\n request: createRequest,\n requestsCache,\n responsesCache\n };\n}\n\n// src/types/logger.ts\nvar LogLevelEnum = {\n Debug: 1,\n Info: 2,\n Error: 3\n};\nexport {\n AlgoliaError,\n ApiError,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_CONNECT_TIMEOUT_NODE,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_NODE,\n DEFAULT_WRITE_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_NODE,\n DeserializationError,\n DetailedApiError,\n ErrorWithStackTrace,\n LogLevelEnum,\n RetryError,\n createAlgoliaAgent,\n createAuth,\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createIterablePromise,\n createMemoryCache,\n createNullCache,\n createNullLogger,\n createStatefulHost,\n createTransporter,\n deserializeFailure,\n deserializeSuccess,\n getAlgoliaAgent,\n isNetworkError,\n isRetryable,\n isSuccess,\n serializeData,\n serializeHeaders,\n serializeQueryParameters,\n serializeUrl,\n shuffle,\n stackFrameWithoutCredentials,\n stackTraceWithoutCredentials\n};\n//# sourceMappingURL=common.js.map","function m(){function r(t){return new Promise(s=>{let e=new XMLHttpRequest;e.open(t.method,t.url,!0),Object.keys(t.headers).forEach(n=>e.setRequestHeader(n,t.headers[n]));let i=(n,a)=>setTimeout(()=>{e.abort(),s({status:0,content:a,isTimedOut:!0})},n),u=i(t.connectTimeout,\"Connection timeout\"),o;e.onreadystatechange=()=>{e.readyState>e.OPENED&&o===void 0&&(clearTimeout(u),o=i(t.responseTimeout,\"Socket timeout\"))},e.onerror=()=>{e.status===0&&(clearTimeout(u),clearTimeout(o),s({content:e.responseText||\"Network request failed\",status:e.status,isTimedOut:!1}))},e.onload=()=>{clearTimeout(u),clearTimeout(o),s({content:e.responseText,status:e.status,isTimedOut:!1})},e.send(t.data)})}return{send:r}}export{m as createXhrRequester};\n//# sourceMappingURL=requester.xhr.js.map","// builds/browser.ts\nimport {\n createNullLogger,\n createMemoryCache,\n createFallbackableCache,\n 
createBrowserLocalStorageCache,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_BROWSER\n} from \"@algolia/client-common\";\nimport { createXhrRequester } from \"@algolia/requester-browser-xhr\";\n\n// src/searchClient.ts\nimport {\n createAuth,\n createTransporter,\n getAlgoliaAgent,\n shuffle,\n ApiError,\n createIterablePromise\n} from \"@algolia/client-common\";\nvar apiClientVersion = \"5.6.1\";\nfunction getDefaultHosts(appId) {\n return [\n {\n url: `${appId}-dsn.algolia.net`,\n accept: \"read\",\n protocol: \"https\"\n },\n {\n url: `${appId}.algolia.net`,\n accept: \"write\",\n protocol: \"https\"\n }\n ].concat(\n shuffle([\n {\n url: `${appId}-1.algolianet.com`,\n accept: \"readWrite\",\n protocol: \"https\"\n },\n {\n url: `${appId}-2.algolianet.com`,\n accept: \"readWrite\",\n protocol: \"https\"\n },\n {\n url: `${appId}-3.algolianet.com`,\n accept: \"readWrite\",\n protocol: \"https\"\n }\n ])\n );\n}\nfunction createSearchClient({\n appId: appIdOption,\n apiKey: apiKeyOption,\n authMode,\n algoliaAgents,\n ...options\n}) {\n const auth = createAuth(appIdOption, apiKeyOption, authMode);\n const transporter = createTransporter({\n hosts: getDefaultHosts(appIdOption),\n ...options,\n algoliaAgent: getAlgoliaAgent({\n algoliaAgents,\n client: \"Search\",\n version: apiClientVersion\n }),\n baseHeaders: {\n \"content-type\": \"text/plain\",\n ...auth.headers(),\n ...options.baseHeaders\n },\n baseQueryParameters: {\n ...auth.queryParameters(),\n ...options.baseQueryParameters\n }\n });\n return {\n transporter,\n /**\n * The `appId` currently in use.\n */\n appId: appIdOption,\n /**\n * Clears the cache of the transporter for the `requestsCache` and `responsesCache` properties.\n */\n clearCache() {\n return Promise.all([transporter.requestsCache.clear(), transporter.responsesCache.clear()]).then(() => void 0);\n },\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return transporter.algoliaAgent.value;\n },\n /**\n * Adds a `segment` to the `x-algolia-agent` sent with every requests.\n *\n * @param segment - The algolia agent (user-agent) segment to add.\n * @param version - The version of the agent.\n */\n addAlgoliaAgent(segment, version) {\n transporter.algoliaAgent.add({ segment, version });\n },\n /**\n * Helper method to switch the API key used to authenticate the requests.\n *\n * @param params - Method params.\n * @param params.apiKey - The new API Key to use.\n */\n setClientApiKey({ apiKey }) {\n if (!authMode || authMode === \"WithinHeaders\") {\n transporter.baseHeaders[\"x-algolia-api-key\"] = apiKey;\n } else {\n transporter.baseQueryParameters[\"x-algolia-api-key\"] = apiKey;\n }\n },\n /**\n * Helper: Wait for a task to be published (completed) for a given `indexName` and `taskID`.\n *\n * @summary Helper method that waits for a task to be published (completed).\n * @param waitForTaskOptions - The `waitForTaskOptions` object.\n * @param waitForTaskOptions.indexName - The `indexName` where the operation was performed.\n * @param waitForTaskOptions.taskID - The `taskID` returned in the method response.\n * @param waitForTaskOptions.maxRetries - The maximum number of retries. 
50 by default.\n * @param waitForTaskOptions.timeout - The function to decide how long to wait between retries.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `getTask` method and merged with the transporter requestOptions.\n */\n waitForTask({\n indexName,\n taskID,\n maxRetries = 50,\n timeout = (retryCount) => Math.min(retryCount * 200, 5e3)\n }, requestOptions) {\n let retryCount = 0;\n return createIterablePromise({\n func: () => this.getTask({ indexName, taskID }, requestOptions),\n validate: (response) => response.status === \"published\",\n aggregator: () => retryCount += 1,\n error: {\n validate: () => retryCount >= maxRetries,\n message: () => `The maximum number of retries exceeded. (${retryCount}/${maxRetries})`\n },\n timeout: () => timeout(retryCount)\n });\n },\n /**\n * Helper: Wait for an application-level task to complete for a given `taskID`.\n *\n * @summary Helper method that waits for a task to be published (completed).\n * @param waitForAppTaskOptions - The `waitForTaskOptions` object.\n * @param waitForAppTaskOptions.taskID - The `taskID` returned in the method response.\n * @param waitForAppTaskOptions.maxRetries - The maximum number of retries. 50 by default.\n * @param waitForAppTaskOptions.timeout - The function to decide how long to wait between retries.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `getTask` method and merged with the transporter requestOptions.\n */\n waitForAppTask({\n taskID,\n maxRetries = 50,\n timeout = (retryCount) => Math.min(retryCount * 200, 5e3)\n }, requestOptions) {\n let retryCount = 0;\n return createIterablePromise({\n func: () => this.getAppTask({ taskID }, requestOptions),\n validate: (response) => response.status === \"published\",\n aggregator: () => retryCount += 1,\n error: {\n validate: () => retryCount >= maxRetries,\n message: () => `The maximum number of retries exceeded. (${retryCount}/${maxRetries})`\n },\n timeout: () => timeout(retryCount)\n });\n },\n /**\n * Helper: Wait for an API key to be added, updated or deleted based on a given `operation`.\n *\n * @summary Helper method that waits for an API key task to be processed.\n * @param waitForApiKeyOptions - The `waitForApiKeyOptions` object.\n * @param waitForApiKeyOptions.operation - The `operation` that was done on a `key`.\n * @param waitForApiKeyOptions.key - The `key` that has been added, deleted or updated.\n * @param waitForApiKeyOptions.apiKey - Necessary to know if an `update` operation has been processed, compare fields of the response with it.\n * @param waitForApiKeyOptions.maxRetries - The maximum number of retries. 50 by default.\n * @param waitForApiKeyOptions.timeout - The function to decide how long to wait between retries.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `getApikey` method and merged with the transporter requestOptions.\n */\n waitForApiKey({\n operation,\n key,\n apiKey,\n maxRetries = 50,\n timeout = (retryCount) => Math.min(retryCount * 200, 5e3)\n }, requestOptions) {\n let retryCount = 0;\n const baseIteratorOptions = {\n aggregator: () => retryCount += 1,\n error: {\n validate: () => retryCount >= maxRetries,\n message: () => `The maximum number of retries exceeded. 
(${retryCount}/${maxRetries})`\n },\n timeout: () => timeout(retryCount)\n };\n if (operation === \"update\") {\n if (!apiKey) {\n throw new Error(\"`apiKey` is required when waiting for an `update` operation.\");\n }\n return createIterablePromise({\n ...baseIteratorOptions,\n func: () => this.getApiKey({ key }, requestOptions),\n validate: (response) => {\n for (const field of Object.keys(apiKey)) {\n const value = apiKey[field];\n const resValue = response[field];\n if (Array.isArray(value) && Array.isArray(resValue)) {\n if (value.length !== resValue.length || value.some((v, index) => v !== resValue[index])) {\n return false;\n }\n } else if (value !== resValue) {\n return false;\n }\n }\n return true;\n }\n });\n }\n return createIterablePromise({\n ...baseIteratorOptions,\n func: () => this.getApiKey({ key }, requestOptions).catch((error) => {\n if (error.status === 404) {\n return void 0;\n }\n throw error;\n }),\n validate: (response) => operation === \"add\" ? response !== void 0 : response === void 0\n });\n },\n /**\n * Helper: Iterate on the `browse` method of the client to allow aggregating objects of an index.\n *\n * @summary Helper method that iterates on the `browse` method.\n * @param browseObjects - The `browseObjects` object.\n * @param browseObjects.indexName - The index in which to perform the request.\n * @param browseObjects.browseParams - The `browse` parameters.\n * @param browseObjects.validate - The validator function. It receive the resolved return of the API call. By default, stops when there is no `cursor` in the response.\n * @param browseObjects.aggregator - The function that runs right after the API call has been resolved, allows you to do anything with the response before `validate`.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `browse` method and merged with the transporter requestOptions.\n */\n browseObjects({ indexName, browseParams, ...browseObjectsOptions }, requestOptions) {\n return createIterablePromise({\n func: (previousResponse) => {\n return this.browse(\n {\n indexName,\n browseParams: {\n cursor: previousResponse ? previousResponse.cursor : void 0,\n ...browseParams\n }\n },\n requestOptions\n );\n },\n validate: (response) => response.cursor === void 0,\n ...browseObjectsOptions\n });\n },\n /**\n * Helper: Iterate on the `searchRules` method of the client to allow aggregating rules of an index.\n *\n * @summary Helper method that iterates on the `searchRules` method.\n * @param browseRules - The `browseRules` object.\n * @param browseRules.indexName - The index in which to perform the request.\n * @param browseRules.searchRulesParams - The `searchRules` method parameters.\n * @param browseRules.validate - The validator function. It receive the resolved return of the API call. 
By default, stops when there is less hits returned than the number of maximum hits (1000).\n * @param browseRules.aggregator - The function that runs right after the API call has been resolved, allows you to do anything with the response before `validate`.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `searchRules` method and merged with the transporter requestOptions.\n */\n browseRules({ indexName, searchRulesParams, ...browseRulesOptions }, requestOptions) {\n const params = {\n hitsPerPage: 1e3,\n ...searchRulesParams\n };\n return createIterablePromise({\n func: (previousResponse) => {\n return this.searchRules(\n {\n indexName,\n searchRulesParams: {\n ...params,\n page: previousResponse ? previousResponse.page + 1 : params.page || 0\n }\n },\n requestOptions\n );\n },\n validate: (response) => response.nbHits < params.hitsPerPage,\n ...browseRulesOptions\n });\n },\n /**\n * Helper: Iterate on the `searchSynonyms` method of the client to allow aggregating rules of an index.\n *\n * @summary Helper method that iterates on the `searchSynonyms` method.\n * @param browseSynonyms - The `browseSynonyms` object.\n * @param browseSynonyms.indexName - The index in which to perform the request.\n * @param browseSynonyms.validate - The validator function. It receive the resolved return of the API call. By default, stops when there is less hits returned than the number of maximum hits (1000).\n * @param browseSynonyms.aggregator - The function that runs right after the API call has been resolved, allows you to do anything with the response before `validate`.\n * @param browseSynonyms.searchSynonymsParams - The `searchSynonyms` method parameters.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `searchSynonyms` method and merged with the transporter requestOptions.\n */\n browseSynonyms({\n indexName,\n searchSynonymsParams,\n ...browseSynonymsOptions\n }, requestOptions) {\n const params = {\n page: 0,\n ...searchSynonymsParams,\n hitsPerPage: 1e3\n };\n return createIterablePromise({\n func: (_) => {\n const resp = this.searchSynonyms(\n {\n indexName,\n searchSynonymsParams: {\n ...params,\n page: params.page\n }\n },\n requestOptions\n );\n params.page += 1;\n return resp;\n },\n validate: (response) => response.nbHits < params.hitsPerPage,\n ...browseSynonymsOptions\n });\n },\n /**\n * Helper: Chunks the given `objects` list in subset of 1000 elements max in order to make it fit in `batch` requests.\n *\n * @summary Helper: Chunks the given `objects` list in subset of 1000 elements max in order to make it fit in `batch` requests.\n * @param chunkedBatch - The `chunkedBatch` object.\n * @param chunkedBatch.indexName - The `indexName` to replace `objects` in.\n * @param chunkedBatch.objects - The array of `objects` to store in the given Algolia `indexName`.\n * @param chunkedBatch.action - The `batch` `action` to perform on the given array of `objects`, defaults to `addObject`.\n * @param chunkedBatch.waitForTasks - Whether or not we should wait until every `batch` tasks has been processed, this operation may slow the total execution time of this method but is more reliable.\n * @param chunkedBatch.batchSize - The size of the chunk of `objects`. The number of `batch` calls will be equal to `length(objects) / batchSize`. 
Defaults to 1000.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `getTask` method and merged with the transporter requestOptions.\n */\n async chunkedBatch({ indexName, objects, action = \"addObject\", waitForTasks, batchSize = 1e3 }, requestOptions) {\n let requests = [];\n const responses = [];\n const objectEntries = objects.entries();\n for (const [i, obj] of objectEntries) {\n requests.push({ action, body: obj });\n if (requests.length === batchSize || i === objects.length - 1) {\n responses.push(await this.batch({ indexName, batchWriteParams: { requests } }, requestOptions));\n requests = [];\n }\n }\n if (waitForTasks) {\n for (const resp of responses) {\n await this.waitForTask({ indexName, taskID: resp.taskID });\n }\n }\n return responses;\n },\n /**\n * Helper: Saves the given array of objects in the given index. The `chunkedBatch` helper is used under the hood, which creates a `batch` requests with at most 1000 objects in it.\n *\n * @summary Helper: Saves the given array of objects in the given index. The `chunkedBatch` helper is used under the hood, which creates a `batch` requests with at most 1000 objects in it.\n * @param saveObjects - The `saveObjects` object.\n * @param saveObjects.indexName - The `indexName` to save `objects` in.\n * @param saveObjects.objects - The array of `objects` to store in the given Algolia `indexName`.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `batch` method and merged with the transporter requestOptions.\n */\n async saveObjects({ indexName, objects }, requestOptions) {\n return await this.chunkedBatch({ indexName, objects, action: \"addObject\" }, requestOptions);\n },\n /**\n * Helper: Deletes every records for the given objectIDs. The `chunkedBatch` helper is used under the hood, which creates a `batch` requests with at most 1000 objectIDs in it.\n *\n * @summary Helper: Deletes every records for the given objectIDs. The `chunkedBatch` helper is used under the hood, which creates a `batch` requests with at most 1000 objectIDs in it.\n * @param deleteObjects - The `deleteObjects` object.\n * @param deleteObjects.indexName - The `indexName` to delete `objectIDs` from.\n * @param deleteObjects.objectIDs - The objectIDs to delete.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `batch` method and merged with the transporter requestOptions.\n */\n async deleteObjects({ indexName, objectIDs }, requestOptions) {\n return await this.chunkedBatch(\n {\n indexName,\n objects: objectIDs.map((objectID) => ({ objectID })),\n action: \"deleteObject\"\n },\n requestOptions\n );\n },\n /**\n * Helper: Replaces object content of all the given objects according to their respective `objectID` field. The `chunkedBatch` helper is used under the hood, which creates a `batch` requests with at most 1000 objects in it.\n *\n * @summary Helper: Replaces object content of all the given objects according to their respective `objectID` field. 
The `chunkedBatch` helper is used under the hood, which creates a `batch` requests with at most 1000 objects in it.\n * @param partialUpdateObjects - The `partialUpdateObjects` object.\n * @param partialUpdateObjects.indexName - The `indexName` to update `objects` in.\n * @param partialUpdateObjects.objects - The array of `objects` to update in the given Algolia `indexName`.\n * @param partialUpdateObjects.createIfNotExists - To be provided if non-existing objects are passed, otherwise, the call will fail..\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `getTask` method and merged with the transporter requestOptions.\n */\n async partialUpdateObjects({ indexName, objects, createIfNotExists }, requestOptions) {\n return await this.chunkedBatch(\n {\n indexName,\n objects,\n action: createIfNotExists ? \"partialUpdateObject\" : \"partialUpdateObjectNoCreate\"\n },\n requestOptions\n );\n },\n /**\n * Helper: Replaces all objects (records) in the given `index_name` with the given `objects`. A temporary index is created during this process in order to backup your data.\n * See https://api-clients-automation.netlify.app/docs/add-new-api-client#5-helpers for implementation details.\n *\n * @summary Helper: Replaces all objects (records) in the given `index_name` with the given `objects`. A temporary index is created during this process in order to backup your data.\n * @param replaceAllObjects - The `replaceAllObjects` object.\n * @param replaceAllObjects.indexName - The `indexName` to replace `objects` in.\n * @param replaceAllObjects.objects - The array of `objects` to store in the given Algolia `indexName`.\n * @param replaceAllObjects.batchSize - The size of the chunk of `objects`. The number of `batch` calls will be equal to `objects.length / batchSize`. 
Defaults to 1000.\n * @param requestOptions - The requestOptions to send along with the query, they will be forwarded to the `batch`, `operationIndex` and `getTask` method and merged with the transporter requestOptions.\n */\n async replaceAllObjects({ indexName, objects, batchSize }, requestOptions) {\n const randomSuffix = Math.floor(Math.random() * 1e6) + 1e5;\n const tmpIndexName = `${indexName}_tmp_${randomSuffix}`;\n let copyOperationResponse = await this.operationIndex(\n {\n indexName,\n operationIndexParams: {\n operation: \"copy\",\n destination: tmpIndexName,\n scope: [\"settings\", \"rules\", \"synonyms\"]\n }\n },\n requestOptions\n );\n const batchResponses = await this.chunkedBatch(\n { indexName: tmpIndexName, objects, waitForTasks: true, batchSize },\n requestOptions\n );\n await this.waitForTask({\n indexName: tmpIndexName,\n taskID: copyOperationResponse.taskID\n });\n copyOperationResponse = await this.operationIndex(\n {\n indexName,\n operationIndexParams: {\n operation: \"copy\",\n destination: tmpIndexName,\n scope: [\"settings\", \"rules\", \"synonyms\"]\n }\n },\n requestOptions\n );\n await this.waitForTask({\n indexName: tmpIndexName,\n taskID: copyOperationResponse.taskID\n });\n const moveOperationResponse = await this.operationIndex(\n {\n indexName: tmpIndexName,\n operationIndexParams: { operation: \"move\", destination: indexName }\n },\n requestOptions\n );\n await this.waitForTask({\n indexName: tmpIndexName,\n taskID: moveOperationResponse.taskID\n });\n return { copyOperationResponse, batchResponses, moveOperationResponse };\n },\n async indexExists({ indexName }) {\n try {\n await this.getSettings({ indexName });\n } catch (error) {\n if (error instanceof ApiError && error.status === 404) {\n return false;\n }\n throw error;\n }\n return true;\n },\n /**\n * Helper: calls the `search` method but with certainty that we will only request Algolia records (hits) and not facets.\n * Disclaimer: We don't assert that the parameters you pass to this method only contains `hits` requests to prevent impacting search performances, this helper is purely for typing purposes.\n *\n * @summary Search multiple indices for `hits`.\n * @param searchMethodParams - Query requests and strategies. Results will be received in the same order as the queries.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchForHits(searchMethodParams, requestOptions) {\n return this.search(searchMethodParams, requestOptions);\n },\n /**\n * Helper: calls the `search` method but with certainty that we will only request Algolia facets and not records (hits).\n * Disclaimer: We don't assert that the parameters you pass to this method only contains `facets` requests to prevent impacting search performances, this helper is purely for typing purposes.\n *\n * @summary Search multiple indices for `facets`.\n * @param searchMethodParams - Query requests and strategies. 
Results will be received in the same order as the queries.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchForFacets(searchMethodParams, requestOptions) {\n return this.search(searchMethodParams, requestOptions);\n },\n /**\n * Creates a new API key with specific permissions and restrictions.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param apiKey - The apiKey object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n addApiKey(apiKey, requestOptions) {\n if (!apiKey) {\n throw new Error(\"Parameter `apiKey` is required when calling `addApiKey`.\");\n }\n if (!apiKey.acl) {\n throw new Error(\"Parameter `apiKey.acl` is required when calling `addApiKey`.\");\n }\n const requestPath = \"/1/keys\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: apiKey\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * If a record with the specified object ID exists, the existing record is replaced. Otherwise, a new record is added to the index. To update _some_ attributes of an existing record, use the [`partial` operation](#tag/Records/operation/partialUpdateObject) instead. To add, update, or replace multiple records, use the [`batch` operation](#tag/Records/operation/batch).\n *\n * Required API Key ACLs:\n * - addObject.\n *\n * @param addOrUpdateObject - The addOrUpdateObject object.\n * @param addOrUpdateObject.indexName - Name of the index on which to perform the operation.\n * @param addOrUpdateObject.objectID - Unique record identifier.\n * @param addOrUpdateObject.body - The record, a schemaless object with attributes that are useful in the context of search and discovery.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n addOrUpdateObject({ indexName, objectID, body }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `addOrUpdateObject`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `addOrUpdateObject`.\");\n }\n if (!body) {\n throw new Error(\"Parameter `body` is required when calling `addOrUpdateObject`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Adds a source to the list of allowed sources.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param source - Source to add.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n appendSource(source, requestOptions) {\n if (!source) {\n throw new Error(\"Parameter `source` is required when calling `appendSource`.\");\n }\n if (!source.source) {\n throw new Error(\"Parameter `source.source` is required when calling `appendSource`.\");\n }\n const requestPath = \"/1/security/sources/append\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: 
requestPath,\n queryParameters,\n headers,\n data: source\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Assigns or moves a user ID to a cluster. The time it takes to move a user is proportional to the amount of data linked to the user ID.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param assignUserId - The assignUserId object.\n * @param assignUserId.xAlgoliaUserID - Unique identifier of the user who makes the search request.\n * @param assignUserId.assignUserIdParams - The assignUserIdParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n assignUserId({ xAlgoliaUserID, assignUserIdParams }, requestOptions) {\n if (!xAlgoliaUserID) {\n throw new Error(\"Parameter `xAlgoliaUserID` is required when calling `assignUserId`.\");\n }\n if (!assignUserIdParams) {\n throw new Error(\"Parameter `assignUserIdParams` is required when calling `assignUserId`.\");\n }\n if (!assignUserIdParams.cluster) {\n throw new Error(\"Parameter `assignUserIdParams.cluster` is required when calling `assignUserId`.\");\n }\n const requestPath = \"/1/clusters/mapping\";\n const headers = {};\n const queryParameters = {};\n if (xAlgoliaUserID !== void 0) {\n headers[\"X-Algolia-User-ID\"] = xAlgoliaUserID.toString();\n }\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: assignUserIdParams\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Adds, updates, or deletes records in one index with a single API request. Batching index updates reduces latency and increases data integrity. - Actions are applied in the order they\\'re specified. - Actions are equivalent to the individual API requests of the same name.\n *\n * @param batch - The batch object.\n * @param batch.indexName - Name of the index on which to perform the operation.\n * @param batch.batchWriteParams - The batchWriteParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n batch({ indexName, batchWriteParams }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `batch`.\");\n }\n if (!batchWriteParams) {\n throw new Error(\"Parameter `batchWriteParams` is required when calling `batch`.\");\n }\n if (!batchWriteParams.requests) {\n throw new Error(\"Parameter `batchWriteParams.requests` is required when calling `batch`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/batch\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: batchWriteParams\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Assigns multiple user IDs to a cluster. 
**You can\\'t move users with this operation**.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param batchAssignUserIds - The batchAssignUserIds object.\n * @param batchAssignUserIds.xAlgoliaUserID - Unique identifier of the user who makes the search request.\n * @param batchAssignUserIds.batchAssignUserIdsParams - The batchAssignUserIdsParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n batchAssignUserIds({ xAlgoliaUserID, batchAssignUserIdsParams }, requestOptions) {\n if (!xAlgoliaUserID) {\n throw new Error(\"Parameter `xAlgoliaUserID` is required when calling `batchAssignUserIds`.\");\n }\n if (!batchAssignUserIdsParams) {\n throw new Error(\"Parameter `batchAssignUserIdsParams` is required when calling `batchAssignUserIds`.\");\n }\n if (!batchAssignUserIdsParams.cluster) {\n throw new Error(\"Parameter `batchAssignUserIdsParams.cluster` is required when calling `batchAssignUserIds`.\");\n }\n if (!batchAssignUserIdsParams.users) {\n throw new Error(\"Parameter `batchAssignUserIdsParams.users` is required when calling `batchAssignUserIds`.\");\n }\n const requestPath = \"/1/clusters/mapping/batch\";\n const headers = {};\n const queryParameters = {};\n if (xAlgoliaUserID !== void 0) {\n headers[\"X-Algolia-User-ID\"] = xAlgoliaUserID.toString();\n }\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: batchAssignUserIdsParams\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Adds or deletes multiple entries from your plurals, segmentation, or stop word dictionaries.\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param batchDictionaryEntries - The batchDictionaryEntries object.\n * @param batchDictionaryEntries.dictionaryName - Dictionary type in which to search.\n * @param batchDictionaryEntries.batchDictionaryEntriesParams - The batchDictionaryEntriesParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n batchDictionaryEntries({ dictionaryName, batchDictionaryEntriesParams }, requestOptions) {\n if (!dictionaryName) {\n throw new Error(\"Parameter `dictionaryName` is required when calling `batchDictionaryEntries`.\");\n }\n if (!batchDictionaryEntriesParams) {\n throw new Error(\"Parameter `batchDictionaryEntriesParams` is required when calling `batchDictionaryEntries`.\");\n }\n if (!batchDictionaryEntriesParams.requests) {\n throw new Error(\n \"Parameter `batchDictionaryEntriesParams.requests` is required when calling `batchDictionaryEntries`.\"\n );\n }\n const requestPath = \"/1/dictionaries/{dictionaryName}/batch\".replace(\n \"{dictionaryName}\",\n encodeURIComponent(dictionaryName)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: batchDictionaryEntriesParams\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves records from an index, up to 1,000 per request. While searching retrieves _hits_ (records augmented with attributes for highlighting and ranking details), browsing _just_ returns matching records. This can be useful if you want to export your indices. - The Analytics API doesn\\'t collect data when using `browse`. - Records are ranked by attributes and custom ranking. 
- There\\'s no ranking for: typo-tolerance, number of matched words, proximity, geo distance. Browse requests automatically apply these settings: - `advancedSyntax`: `false` - `attributesToHighlight`: `[]` - `attributesToSnippet`: `[]` - `distinct`: `false` - `enablePersonalization`: `false` - `enableRules`: `false` - `facets`: `[]` - `getRankingInfo`: `false` - `ignorePlurals`: `false` - `optionalFilters`: `[]` - `typoTolerance`: `true` or `false` (`min` and `strict` is evaluated to `true`) If you send these parameters with your browse requests, they\\'ll be ignored.\n *\n * Required API Key ACLs:\n * - browse.\n *\n * @param browse - The browse object.\n * @param browse.indexName - Name of the index on which to perform the operation.\n * @param browse.browseParams - The browseParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n browse({ indexName, browseParams }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `browse`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/browse\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: browseParams ? browseParams : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes only the records from an index while keeping settings, synonyms, and rules.\n *\n * Required API Key ACLs:\n * - deleteIndex.\n *\n * @param clearObjects - The clearObjects object.\n * @param clearObjects.indexName - Name of the index on which to perform the operation.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n clearObjects({ indexName }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `clearObjects`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/clear\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes all rules from the index.\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param clearRules - The clearRules object.\n * @param clearRules.indexName - Name of the index on which to perform the operation.\n * @param clearRules.forwardToReplicas - Whether changes are applied to replica indices.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n clearRules({ indexName, forwardToReplicas }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `clearRules`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/rules/clear\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n if (forwardToReplicas !== void 0) {\n queryParameters.forwardToReplicas = forwardToReplicas.toString();\n }\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes all synonyms from the index.\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param 
clearSynonyms - The clearSynonyms object.\n * @param clearSynonyms.indexName - Name of the index on which to perform the operation.\n * @param clearSynonyms.forwardToReplicas - Whether changes are applied to replica indices.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n clearSynonyms({ indexName, forwardToReplicas }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `clearSynonyms`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/synonyms/clear\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n if (forwardToReplicas !== void 0) {\n queryParameters.forwardToReplicas = forwardToReplicas.toString();\n }\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n *\n * @param customDelete - The customDelete object.\n * @param customDelete.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customDelete.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customDelete({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customDelete`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n *\n * @param customGet - The customGet object.\n * @param customGet.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customGet.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customGet({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customGet`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? 
parameters : {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n *\n * @param customPost - The customPost object.\n * @param customPost.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPost.parameters - Query parameters to apply to the current query.\n * @param customPost.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPost({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPost`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n *\n * @param customPut - The customPut object.\n * @param customPut.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPut.parameters - Query parameters to apply to the current query.\n * @param customPut.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPut({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPut`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes the API key.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param deleteApiKey - The deleteApiKey object.\n * @param deleteApiKey.key - API key.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteApiKey({ key }, requestOptions) {\n if (!key) {\n throw new Error(\"Parameter `key` is required when calling `deleteApiKey`.\");\n }\n const requestPath = \"/1/keys/{key}\".replace(\"{key}\", encodeURIComponent(key));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This operation doesn\\'t accept empty queries or filters. 
It\\'s more efficient to get a list of object IDs with the [`browse` operation](#tag/Search/operation/browse), and then delete the records using the [`batch` operation](#tag/Records/operation/batch).\n *\n * Required API Key ACLs:\n * - deleteIndex.\n *\n * @param deleteBy - The deleteBy object.\n * @param deleteBy.indexName - Name of the index on which to perform the operation.\n * @param deleteBy.deleteByParams - The deleteByParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteBy({ indexName, deleteByParams }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `deleteBy`.\");\n }\n if (!deleteByParams) {\n throw new Error(\"Parameter `deleteByParams` is required when calling `deleteBy`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/deleteByQuery\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: deleteByParams\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes an index and all its settings. - Deleting an index doesn\\'t delete its analytics data. - If you try to delete a non-existing index, the operation is ignored without warning. - If the index you want to delete has replica indices, the replicas become independent indices. - If the index you want to delete is a replica index, you must first unlink it from its primary index before you can delete it. For more information, see [Delete replica indices](https://www.algolia.com/doc/guides/managing-results/refine-results/sorting/how-to/deleting-replicas/).\n *\n * Required API Key ACLs:\n * - deleteIndex.\n *\n * @param deleteIndex - The deleteIndex object.\n * @param deleteIndex.indexName - Name of the index on which to perform the operation.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteIndex({ indexName }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `deleteIndex`.\");\n }\n const requestPath = \"/1/indexes/{indexName}\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a record by its object ID. To delete more than one record, use the [`batch` operation](#tag/Records/operation/batch). 
To delete records matching a query, use the [`deleteByQuery` operation](#tag/Records/operation/deleteBy).\n *\n * Required API Key ACLs:\n * - deleteObject.\n *\n * @param deleteObject - The deleteObject object.\n * @param deleteObject.indexName - Name of the index on which to perform the operation.\n * @param deleteObject.objectID - Unique record identifier.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteObject({ indexName, objectID }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `deleteObject`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `deleteObject`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a rule by its ID. To find the object ID for rules, use the [`search` operation](#tag/Rules/operation/searchRules).\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param deleteRule - The deleteRule object.\n * @param deleteRule.indexName - Name of the index on which to perform the operation.\n * @param deleteRule.objectID - Unique identifier of a rule object.\n * @param deleteRule.forwardToReplicas - Whether changes are applied to replica indices.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteRule({ indexName, objectID, forwardToReplicas }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `deleteRule`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `deleteRule`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/rules/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n if (forwardToReplicas !== void 0) {\n queryParameters.forwardToReplicas = forwardToReplicas.toString();\n }\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a source from the list of allowed sources.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param deleteSource - The deleteSource object.\n * @param deleteSource.source - IP address range of the source.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteSource({ source }, requestOptions) {\n if (!source) {\n throw new Error(\"Parameter `source` is required when calling `deleteSource`.\");\n }\n const requestPath = \"/1/security/sources/{source}\".replace(\"{source}\", encodeURIComponent(source));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a synonym by its ID. 
To find the object IDs of your synonyms, use the [`search` operation](#tag/Synonyms/operation/searchSynonyms).\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param deleteSynonym - The deleteSynonym object.\n * @param deleteSynonym.indexName - Name of the index on which to perform the operation.\n * @param deleteSynonym.objectID - Unique identifier of a synonym object.\n * @param deleteSynonym.forwardToReplicas - Whether changes are applied to replica indices.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteSynonym({ indexName, objectID, forwardToReplicas }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `deleteSynonym`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `deleteSynonym`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/synonyms/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n if (forwardToReplicas !== void 0) {\n queryParameters.forwardToReplicas = forwardToReplicas.toString();\n }\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Gets the permissions and restrictions of an API key. When authenticating with the admin API key, you can request information for any of your application\\'s keys. When authenticating with other API keys, you can only retrieve information for that key.\n *\n * @param getApiKey - The getApiKey object.\n * @param getApiKey.key - API key.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getApiKey({ key }, requestOptions) {\n if (!key) {\n throw new Error(\"Parameter `key` is required when calling `getApiKey`.\");\n }\n const requestPath = \"/1/keys/{key}\".replace(\"{key}\", encodeURIComponent(key));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Checks the status of a given application task.\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param getAppTask - The getAppTask object.\n * @param getAppTask.taskID - Unique task identifier.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getAppTask({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `getAppTask`.\");\n }\n const requestPath = \"/1/task/{taskID}\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Lists supported languages with their supported dictionary types and number of custom entries.\n *\n * Required API Key ACLs:\n * - settings.\n *\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getDictionaryLanguages(requestOptions) {\n const requestPath = \"/1/dictionaries/*/languages\";\n const headers = {};\n const 
queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the languages for which standard dictionary entries are turned off.\n *\n * Required API Key ACLs:\n * - settings.\n *\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getDictionarySettings(requestOptions) {\n const requestPath = \"/1/dictionaries/*/settings\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * The request must be authenticated by an API key with the [`logs` ACL](https://www.algolia.com/doc/guides/security/api-keys/#access-control-list-acl). - Logs are held for the last seven days. - Up to 1,000 API requests per server are logged. - This request counts towards your [operations quota](https://support.algolia.com/hc/en-us/articles/4406981829777-How-does-Algolia-count-records-and-operations-) but doesn\\'t appear in the logs itself.\n *\n * Required API Key ACLs:\n * - logs.\n *\n * @param getLogs - The getLogs object.\n * @param getLogs.offset - First log entry to retrieve. The most recent entries are listed first.\n * @param getLogs.length - Maximum number of entries to retrieve.\n * @param getLogs.indexName - Index for which to retrieve log entries. By default, log entries are retrieved for all indices.\n * @param getLogs.type - Type of log entries to retrieve. By default, all log entries are retrieved.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getLogs({ offset, length, indexName, type } = {}, requestOptions = void 0) {\n const requestPath = \"/1/logs\";\n const headers = {};\n const queryParameters = {};\n if (offset !== void 0) {\n queryParameters.offset = offset.toString();\n }\n if (length !== void 0) {\n queryParameters.length = length.toString();\n }\n if (indexName !== void 0) {\n queryParameters.indexName = indexName.toString();\n }\n if (type !== void 0) {\n queryParameters.type = type.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves one record by its object ID. To retrieve more than one record, use the [`objects` operation](#tag/Records/operation/getObjects).\n *\n * Required API Key ACLs:\n * - search.\n *\n * @param getObject - The getObject object.\n * @param getObject.indexName - Name of the index on which to perform the operation.\n * @param getObject.objectID - Unique record identifier.\n * @param getObject.attributesToRetrieve - Attributes to include with the records in the response. This is useful to reduce the size of the API response. By default, all retrievable attributes are returned. `objectID` is always retrieved. 
Attributes included in `unretrievableAttributes` won\\'t be retrieved unless the request is authenticated with the admin API key.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getObject({ indexName, objectID, attributesToRetrieve }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `getObject`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `getObject`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n if (attributesToRetrieve !== void 0) {\n queryParameters.attributesToRetrieve = attributesToRetrieve.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves one or more records, potentially from different indices. Records are returned in the same order as the requests.\n *\n * Required API Key ACLs:\n * - search.\n *\n * @param getObjectsParams - Request object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getObjects(getObjectsParams, requestOptions) {\n if (!getObjectsParams) {\n throw new Error(\"Parameter `getObjectsParams` is required when calling `getObjects`.\");\n }\n if (!getObjectsParams.requests) {\n throw new Error(\"Parameter `getObjectsParams.requests` is required when calling `getObjects`.\");\n }\n const requestPath = \"/1/indexes/*/objects\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: getObjectsParams,\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a rule by its ID. 
To find the object ID of rules, use the [`search` operation](#tag/Rules/operation/searchRules).\n *\n * Required API Key ACLs:\n * - settings.\n *\n * @param getRule - The getRule object.\n * @param getRule.indexName - Name of the index on which to perform the operation.\n * @param getRule.objectID - Unique identifier of a rule object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getRule({ indexName, objectID }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `getRule`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `getRule`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/rules/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves an object with non-null index settings.\n *\n * Required API Key ACLs:\n * - search.\n *\n * @param getSettings - The getSettings object.\n * @param getSettings.indexName - Name of the index on which to perform the operation.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getSettings({ indexName }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `getSettings`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/settings\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves all allowed IP addresses with access to your application.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getSources(requestOptions) {\n const requestPath = \"/1/security/sources\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a synonym by its ID. 
To find the object IDs for your synonyms, use the [`search` operation](#tag/Synonyms/operation/searchSynonyms).\n *\n * Required API Key ACLs:\n * - settings.\n *\n * @param getSynonym - The getSynonym object.\n * @param getSynonym.indexName - Name of the index on which to perform the operation.\n * @param getSynonym.objectID - Unique identifier of a synonym object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getSynonym({ indexName, objectID }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `getSynonym`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `getSynonym`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/synonyms/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Checks the status of a given task. Indexing tasks are asynchronous. When you add, update, or delete records or indices, a task is created on a queue and completed depending on the load on the server. The indexing tasks\\' responses include a task ID that you can use to check the status.\n *\n * Required API Key ACLs:\n * - addObject.\n *\n * @param getTask - The getTask object.\n * @param getTask.indexName - Name of the index on which to perform the operation.\n * @param getTask.taskID - Unique task identifier.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTask({ indexName, taskID }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `getTask`.\");\n }\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `getTask`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/task/{taskID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Get the IDs of the 10 users with the highest number of records per cluster. Since it can take a few seconds to get the data from the different clusters, the response isn\\'t real-time.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTopUserIds(requestOptions) {\n const requestPath = \"/1/clusters/mapping/top\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Returns the user ID data stored in the mapping. 
Since it can take a few seconds to get the data from the different clusters, the response isn\\'t real-time.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param getUserId - The getUserId object.\n * @param getUserId.userID - Unique identifier of the user who makes the search request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getUserId({ userID }, requestOptions) {\n if (!userID) {\n throw new Error(\"Parameter `userID` is required when calling `getUserId`.\");\n }\n const requestPath = \"/1/clusters/mapping/{userID}\".replace(\"{userID}\", encodeURIComponent(userID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * To determine when the time-consuming process of creating a large batch of users or migrating users from one cluster to another is complete, this operation retrieves the status of the process.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param hasPendingMappings - The hasPendingMappings object.\n * @param hasPendingMappings.getClusters - Whether to include the cluster\\'s pending mapping state in the response.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n hasPendingMappings({ getClusters } = {}, requestOptions = void 0) {\n const requestPath = \"/1/clusters/mapping/pending\";\n const headers = {};\n const queryParameters = {};\n if (getClusters !== void 0) {\n queryParameters.getClusters = getClusters.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Lists all API keys associated with your Algolia application, including their permissions and restrictions.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listApiKeys(requestOptions) {\n const requestPath = \"/1/keys\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Lists the available clusters in a multi-cluster setup.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listClusters(requestOptions) {\n const requestPath = \"/1/clusters\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Lists all indices in the current Algolia application. The request follows any index restrictions of the API key you use to make the request.\n *\n * Required API Key ACLs:\n * - listIndexes.\n *\n * @param listIndices - The listIndices object.\n * @param listIndices.page - Requested page of the API response. 
If `null`, the API response is not paginated.\n * @param listIndices.hitsPerPage - Number of hits per page.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listIndices({ page, hitsPerPage } = {}, requestOptions = void 0) {\n const requestPath = \"/1/indexes\";\n const headers = {};\n const queryParameters = {};\n if (page !== void 0) {\n queryParameters.page = page.toString();\n }\n if (hitsPerPage !== void 0) {\n queryParameters.hitsPerPage = hitsPerPage.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Lists the userIDs assigned to a multi-cluster application. Since it can take a few seconds to get the data from the different clusters, the response isn\\'t real-time.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param listUserIds - The listUserIds object.\n * @param listUserIds.page - Requested page of the API response. If `null`, the API response is not paginated.\n * @param listUserIds.hitsPerPage - Number of hits per page.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listUserIds({ page, hitsPerPage } = {}, requestOptions = void 0) {\n const requestPath = \"/1/clusters/mapping\";\n const headers = {};\n const queryParameters = {};\n if (page !== void 0) {\n queryParameters.page = page.toString();\n }\n if (hitsPerPage !== void 0) {\n queryParameters.hitsPerPage = hitsPerPage.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Adds, updates, or deletes records in multiple indices with a single API request. - Actions are applied in the order they are specified. - Actions are equivalent to the individual API requests of the same name.\n *\n * @param batchParams - The batchParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n multipleBatch(batchParams, requestOptions) {\n if (!batchParams) {\n throw new Error(\"Parameter `batchParams` is required when calling `multipleBatch`.\");\n }\n if (!batchParams.requests) {\n throw new Error(\"Parameter `batchParams.requests` is required when calling `multipleBatch`.\");\n }\n const requestPath = \"/1/indexes/*/batch\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: batchParams\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Copies or moves (renames) an index within the same Algolia application. - Existing destination indices are overwritten, except for their analytics data. - If the destination index doesn\\'t exist yet, it\\'ll be created. **Copy** - Copying a source index that doesn\\'t exist creates a new index with 0 records and default settings. - The API keys of the source index are merged with the existing keys in the destination index. - You can\\'t copy the `enableReRanking`, `mode`, and `replicas` settings. - You can\\'t copy to a destination index that already has replicas. - Be aware of the [size limits](https://www.algolia.com/doc/guides/scaling/algolia-service-limits/#application-record-and-index-limits). 
- Related guide: [Copy indices](https://www.algolia.com/doc/guides/sending-and-managing-data/manage-indices-and-apps/manage-indices/how-to/copy-indices/) **Move** - Moving a source index that doesn\\'t exist is ignored without returning an error. - When moving an index, the analytics data keep their original name and a new set of analytics data is started for the new name. To access the original analytics in the dashboard, create an index with the original name. - If the destination index has replicas, moving will overwrite the existing index and copy the data to the replica indices. - Related guide: [Move indices](https://www.algolia.com/doc/guides/sending-and-managing-data/manage-indices-and-apps/manage-indices/how-to/move-indices/).\n *\n * Required API Key ACLs:\n * - addObject.\n *\n * @param operationIndex - The operationIndex object.\n * @param operationIndex.indexName - Name of the index on which to perform the operation.\n * @param operationIndex.operationIndexParams - The operationIndexParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n operationIndex({ indexName, operationIndexParams }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `operationIndex`.\");\n }\n if (!operationIndexParams) {\n throw new Error(\"Parameter `operationIndexParams` is required when calling `operationIndex`.\");\n }\n if (!operationIndexParams.operation) {\n throw new Error(\"Parameter `operationIndexParams.operation` is required when calling `operationIndex`.\");\n }\n if (!operationIndexParams.destination) {\n throw new Error(\"Parameter `operationIndexParams.destination` is required when calling `operationIndex`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/operation\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: operationIndexParams\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Adds new attributes to a record, or update existing ones. - If a record with the specified object ID doesn\\'t exist, a new record is added to the index **if** `createIfNotExists` is true. - If the index doesn\\'t exist yet, this method creates a new index. - You can use any first-level attribute but not nested attributes. If you specify a nested attribute, the engine treats it as a replacement for its first-level ancestor. To update an attribute without pushing the entire record, you can use these built-in operations. These operations can be helpful if you don\\'t have access to your initial data. - Increment: increment a numeric attribute - Decrement: decrement a numeric attribute - Add: append a number or string element to an array attribute - Remove: remove all matching number or string elements from an array attribute made of numbers or strings - AddUnique: add a number or string element to an array attribute made of numbers or strings only if it\\'s not already present - IncrementFrom: increment a numeric integer attribute only if the provided value matches the current value, and otherwise ignore the whole object update. For example, if you pass an IncrementFrom value of 2 for the version attribute, but the current value of the attribute is 1, the engine ignores the update. 
If the object doesn\\'t exist, the engine only creates it if you pass an IncrementFrom value of 0. - IncrementSet: increment a numeric integer attribute only if the provided value is greater than the current value, and otherwise ignore the whole object update. For example, if you pass an IncrementSet value of 2 for the version attribute, and the current value of the attribute is 1, the engine updates the object. If the object doesn\\'t exist yet, the engine only creates it if you pass an IncrementSet value that\\'s greater than 0. You can specify an operation by providing an object with the attribute to update as the key and its value being an object with the following properties: - _operation: the operation to apply on the attribute - value: the right-hand side argument to the operation, for example, increment or decrement step, value to add or remove.\n *\n * Required API Key ACLs:\n * - addObject.\n *\n * @param partialUpdateObject - The partialUpdateObject object.\n * @param partialUpdateObject.indexName - Name of the index on which to perform the operation.\n * @param partialUpdateObject.objectID - Unique record identifier.\n * @param partialUpdateObject.attributesToUpdate - Attributes with their values.\n * @param partialUpdateObject.createIfNotExists - Whether to create a new record if it doesn\\'t exist.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n partialUpdateObject({ indexName, objectID, attributesToUpdate, createIfNotExists }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `partialUpdateObject`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `partialUpdateObject`.\");\n }\n if (!attributesToUpdate) {\n throw new Error(\"Parameter `attributesToUpdate` is required when calling `partialUpdateObject`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/{objectID}/partial\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n if (createIfNotExists !== void 0) {\n queryParameters.createIfNotExists = createIfNotExists.toString();\n }\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: attributesToUpdate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a user ID and its associated data from the clusters.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param removeUserId - The removeUserId object.\n * @param removeUserId.userID - Unique identifier of the user who makes the search request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n removeUserId({ userID }, requestOptions) {\n if (!userID) {\n throw new Error(\"Parameter `userID` is required when calling `removeUserId`.\");\n }\n const requestPath = \"/1/clusters/mapping/{userID}\".replace(\"{userID}\", encodeURIComponent(userID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Replaces the list of allowed sources.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param replaceSources - The replaceSources object.\n * @param replaceSources.source - Allowed sources.\n * @param 
requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n replaceSources({ source }, requestOptions) {\n if (!source) {\n throw new Error(\"Parameter `source` is required when calling `replaceSources`.\");\n }\n const requestPath = \"/1/security/sources\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: source\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Restores a deleted API key. Restoring resets the `validity` attribute to `0`. Algolia stores up to 1,000 API keys per application. If you create more, the oldest API keys are deleted and can\\'t be restored.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param restoreApiKey - The restoreApiKey object.\n * @param restoreApiKey.key - API key.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n restoreApiKey({ key }, requestOptions) {\n if (!key) {\n throw new Error(\"Parameter `key` is required when calling `restoreApiKey`.\");\n }\n const requestPath = \"/1/keys/{key}/restore\".replace(\"{key}\", encodeURIComponent(key));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Adds a record to an index or replace it. - If the record doesn\\'t have an object ID, a new record with an auto-generated object ID is added to your index. - If a record with the specified object ID exists, the existing record is replaced. - If a record with the specified object ID doesn\\'t exist, a new record is added to your index. - If you add a record to an index that doesn\\'t exist yet, a new index is created. To update _some_ attributes of a record, use the [`partial` operation](#tag/Records/operation/partialUpdateObject). To add, update, or replace multiple records, use the [`batch` operation](#tag/Records/operation/batch).\n *\n * Required API Key ACLs:\n * - addObject.\n *\n * @param saveObject - The saveObject object.\n * @param saveObject.indexName - Name of the index on which to perform the operation.\n * @param saveObject.body - The record, a schemaless object with attributes that are useful in the context of search and discovery.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n saveObject({ indexName, body }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `saveObject`.\");\n }\n if (!body) {\n throw new Error(\"Parameter `body` is required when calling `saveObject`.\");\n }\n const requestPath = \"/1/indexes/{indexName}\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * If a rule with the specified object ID doesn\\'t exist, it\\'s created. Otherwise, the existing rule is replaced. 
To create or update more than one rule, use the [`batch` operation](#tag/Rules/operation/saveRules).\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param saveRule - The saveRule object.\n * @param saveRule.indexName - Name of the index on which to perform the operation.\n * @param saveRule.objectID - Unique identifier of a rule object.\n * @param saveRule.rule - The rule object.\n * @param saveRule.forwardToReplicas - Whether changes are applied to replica indices.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n saveRule({ indexName, objectID, rule, forwardToReplicas }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `saveRule`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `saveRule`.\");\n }\n if (!rule) {\n throw new Error(\"Parameter `rule` is required when calling `saveRule`.\");\n }\n if (!rule.objectID) {\n throw new Error(\"Parameter `rule.objectID` is required when calling `saveRule`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/rules/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n if (forwardToReplicas !== void 0) {\n queryParameters.forwardToReplicas = forwardToReplicas.toString();\n }\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: rule\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Create or update multiple rules. If a rule with the specified object ID doesn\\'t exist, Algolia creates a new one. Otherwise, existing rules are replaced.\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param saveRules - The saveRules object.\n * @param saveRules.indexName - Name of the index on which to perform the operation.\n * @param saveRules.rules - The rules object.\n * @param saveRules.forwardToReplicas - Whether changes are applied to replica indices.\n * @param saveRules.clearExistingRules - Whether existing rules should be deleted before adding this batch.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n saveRules({ indexName, rules, forwardToReplicas, clearExistingRules }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `saveRules`.\");\n }\n if (!rules) {\n throw new Error(\"Parameter `rules` is required when calling `saveRules`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/rules/batch\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n if (forwardToReplicas !== void 0) {\n queryParameters.forwardToReplicas = forwardToReplicas.toString();\n }\n if (clearExistingRules !== void 0) {\n queryParameters.clearExistingRules = clearExistingRules.toString();\n }\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: rules\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * If a synonym with the specified object ID doesn\\'t exist, Algolia adds a new one. Otherwise, the existing synonym is replaced. 
To add multiple synonyms in a single API request, use the [`batch` operation](#tag/Synonyms/operation/saveSynonyms).\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param saveSynonym - The saveSynonym object.\n * @param saveSynonym.indexName - Name of the index on which to perform the operation.\n * @param saveSynonym.objectID - Unique identifier of a synonym object.\n * @param saveSynonym.synonymHit - The synonymHit object.\n * @param saveSynonym.forwardToReplicas - Whether changes are applied to replica indices.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n saveSynonym({ indexName, objectID, synonymHit, forwardToReplicas }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `saveSynonym`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `saveSynonym`.\");\n }\n if (!synonymHit) {\n throw new Error(\"Parameter `synonymHit` is required when calling `saveSynonym`.\");\n }\n if (!synonymHit.objectID) {\n throw new Error(\"Parameter `synonymHit.objectID` is required when calling `saveSynonym`.\");\n }\n if (!synonymHit.type) {\n throw new Error(\"Parameter `synonymHit.type` is required when calling `saveSynonym`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/synonyms/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n if (forwardToReplicas !== void 0) {\n queryParameters.forwardToReplicas = forwardToReplicas.toString();\n }\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: synonymHit\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * If a synonym with the `objectID` doesn\\'t exist, Algolia adds a new one. 
Otherwise, existing synonyms are replaced.\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param saveSynonyms - The saveSynonyms object.\n * @param saveSynonyms.indexName - Name of the index on which to perform the operation.\n * @param saveSynonyms.synonymHit - The synonymHit object.\n * @param saveSynonyms.forwardToReplicas - Whether changes are applied to replica indices.\n * @param saveSynonyms.replaceExistingSynonyms - Whether to replace all synonyms in the index with the ones sent with this request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n saveSynonyms({ indexName, synonymHit, forwardToReplicas, replaceExistingSynonyms }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `saveSynonyms`.\");\n }\n if (!synonymHit) {\n throw new Error(\"Parameter `synonymHit` is required when calling `saveSynonyms`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/synonyms/batch\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n if (forwardToReplicas !== void 0) {\n queryParameters.forwardToReplicas = forwardToReplicas.toString();\n }\n if (replaceExistingSynonyms !== void 0) {\n queryParameters.replaceExistingSynonyms = replaceExistingSynonyms.toString();\n }\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: synonymHit\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Sends multiple search request to one or more indices. This can be useful in these cases: - Different indices for different purposes, such as, one index for products, another one for marketing content. - Multiple searches to the same index—for example, with different filters.\n *\n * Required API Key ACLs:\n * - search.\n *\n * @param searchMethodParams - Muli-search request body. 
Results are returned in the same order as the requests.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n search(searchMethodParams, requestOptions) {\n if (searchMethodParams && Array.isArray(searchMethodParams)) {\n const newSignatureRequest = {\n requests: searchMethodParams.map(({ params, ...legacyRequest }) => {\n if (legacyRequest.type === \"facet\") {\n return {\n ...legacyRequest,\n ...params,\n type: \"facet\"\n };\n }\n return {\n ...legacyRequest,\n ...params,\n facet: void 0,\n maxFacetHits: void 0,\n facetQuery: void 0\n };\n })\n };\n searchMethodParams = newSignatureRequest;\n }\n if (!searchMethodParams) {\n throw new Error(\"Parameter `searchMethodParams` is required when calling `search`.\");\n }\n if (!searchMethodParams.requests) {\n throw new Error(\"Parameter `searchMethodParams.requests` is required when calling `search`.\");\n }\n const requestPath = \"/1/indexes/*/queries\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: searchMethodParams,\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for standard and custom dictionary entries.\n *\n * Required API Key ACLs:\n * - settings.\n *\n * @param searchDictionaryEntries - The searchDictionaryEntries object.\n * @param searchDictionaryEntries.dictionaryName - Dictionary type in which to search.\n * @param searchDictionaryEntries.searchDictionaryEntriesParams - The searchDictionaryEntriesParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchDictionaryEntries({ dictionaryName, searchDictionaryEntriesParams }, requestOptions) {\n if (!dictionaryName) {\n throw new Error(\"Parameter `dictionaryName` is required when calling `searchDictionaryEntries`.\");\n }\n if (!searchDictionaryEntriesParams) {\n throw new Error(\n \"Parameter `searchDictionaryEntriesParams` is required when calling `searchDictionaryEntries`.\"\n );\n }\n if (!searchDictionaryEntriesParams.query) {\n throw new Error(\n \"Parameter `searchDictionaryEntriesParams.query` is required when calling `searchDictionaryEntries`.\"\n );\n }\n const requestPath = \"/1/dictionaries/{dictionaryName}/search\".replace(\n \"{dictionaryName}\",\n encodeURIComponent(dictionaryName)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: searchDictionaryEntriesParams,\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for values of a specified facet attribute. - By default, facet values are sorted by decreasing count. You can adjust this with the `sortFacetValueBy` parameter. - Searching for facet values doesn\\'t work if you have **more than 65 searchable facets and searchable attributes combined**.\n *\n * Required API Key ACLs:\n * - search.\n *\n * @param searchForFacetValues - The searchForFacetValues object.\n * @param searchForFacetValues.indexName - Name of the index on which to perform the operation.\n * @param searchForFacetValues.facetName - Facet attribute in which to search for values. 
This attribute must be included in the `attributesForFaceting` index setting with the `searchable()` modifier.\n * @param searchForFacetValues.searchForFacetValuesRequest - The searchForFacetValuesRequest object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchForFacetValues({ indexName, facetName, searchForFacetValuesRequest }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `searchForFacetValues`.\");\n }\n if (!facetName) {\n throw new Error(\"Parameter `facetName` is required when calling `searchForFacetValues`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/facets/{facetName}/query\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{facetName}\", encodeURIComponent(facetName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: searchForFacetValuesRequest ? searchForFacetValuesRequest : {},\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for rules in your index.\n *\n * Required API Key ACLs:\n * - settings.\n *\n * @param searchRules - The searchRules object.\n * @param searchRules.indexName - Name of the index on which to perform the operation.\n * @param searchRules.searchRulesParams - The searchRulesParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchRules({ indexName, searchRulesParams }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `searchRules`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/rules/search\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: searchRulesParams ? searchRulesParams : {},\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches a single index and return matching search results (_hits_). This method lets you retrieve up to 1,000 hits. If you need more, use the [`browse` operation](#tag/Search/operation/browse) or increase the `paginatedLimitedTo` index setting.\n *\n * Required API Key ACLs:\n * - search.\n *\n * @param searchSingleIndex - The searchSingleIndex object.\n * @param searchSingleIndex.indexName - Name of the index on which to perform the operation.\n * @param searchSingleIndex.searchParams - The searchParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchSingleIndex({ indexName, searchParams }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `searchSingleIndex`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/query\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: searchParams ? 
searchParams : {},\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for synonyms in your index.\n *\n * Required API Key ACLs:\n * - settings.\n *\n * @param searchSynonyms - The searchSynonyms object.\n * @param searchSynonyms.indexName - Name of the index on which to perform the operation.\n * @param searchSynonyms.searchSynonymsParams - Body of the `searchSynonyms` operation.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchSynonyms({ indexName, searchSynonymsParams }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `searchSynonyms`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/synonyms/search\".replace(\n \"{indexName}\",\n encodeURIComponent(indexName)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: searchSynonymsParams ? searchSynonymsParams : {},\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Since it can take a few seconds to get the data from the different clusters, the response isn\\'t real-time. To ensure rapid updates, the user IDs index isn\\'t built at the same time as the mapping. Instead, it\\'s built every 12 hours, at the same time as the update of user ID usage. For example, if you add or move a user ID, the search will show an old value until the next time the mapping is rebuilt (every 12 hours).\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param searchUserIdsParams - The searchUserIdsParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchUserIds(searchUserIdsParams, requestOptions) {\n if (!searchUserIdsParams) {\n throw new Error(\"Parameter `searchUserIdsParams` is required when calling `searchUserIds`.\");\n }\n if (!searchUserIdsParams.query) {\n throw new Error(\"Parameter `searchUserIdsParams.query` is required when calling `searchUserIds`.\");\n }\n const requestPath = \"/1/clusters/mapping/search\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: searchUserIdsParams,\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Turns standard stop word dictionary entries on or off for a given language.\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param dictionarySettingsParams - The dictionarySettingsParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n setDictionarySettings(dictionarySettingsParams, requestOptions) {\n if (!dictionarySettingsParams) {\n throw new Error(\"Parameter `dictionarySettingsParams` is required when calling `setDictionarySettings`.\");\n }\n if (!dictionarySettingsParams.disableStandardEntries) {\n throw new Error(\n \"Parameter `dictionarySettingsParams.disableStandardEntries` is required when calling `setDictionarySettings`.\"\n );\n }\n const requestPath = \"/1/dictionaries/*/settings\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n 
headers,\n data: dictionarySettingsParams\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Update the specified index settings. Index settings that you don\\'t specify are left unchanged. Specify `null` to reset a setting to its default value. For best performance, update the index settings before you add new records to your index.\n *\n * Required API Key ACLs:\n * - editSettings.\n *\n * @param setSettings - The setSettings object.\n * @param setSettings.indexName - Name of the index on which to perform the operation.\n * @param setSettings.indexSettings - The indexSettings object.\n * @param setSettings.forwardToReplicas - Whether changes are applied to replica indices.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n setSettings({ indexName, indexSettings, forwardToReplicas }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `setSettings`.\");\n }\n if (!indexSettings) {\n throw new Error(\"Parameter `indexSettings` is required when calling `setSettings`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/settings\".replace(\"{indexName}\", encodeURIComponent(indexName));\n const headers = {};\n const queryParameters = {};\n if (forwardToReplicas !== void 0) {\n queryParameters.forwardToReplicas = forwardToReplicas.toString();\n }\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: indexSettings\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Replaces the permissions of an existing API key. Any unspecified attribute resets that attribute to its default value.\n *\n * Required API Key ACLs:\n * - admin.\n *\n * @param updateApiKey - The updateApiKey object.\n * @param updateApiKey.key - API key.\n * @param updateApiKey.apiKey - The apiKey object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n updateApiKey({ key, apiKey }, requestOptions) {\n if (!key) {\n throw new Error(\"Parameter `key` is required when calling `updateApiKey`.\");\n }\n if (!apiKey) {\n throw new Error(\"Parameter `apiKey` is required when calling `updateApiKey`.\");\n }\n if (!apiKey.acl) {\n throw new Error(\"Parameter `apiKey.acl` is required when calling `updateApiKey`.\");\n }\n const requestPath = \"/1/keys/{key}\".replace(\"{key}\", encodeURIComponent(key));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: apiKey\n };\n return transporter.request(request, requestOptions);\n }\n };\n}\n\n// builds/browser.ts\nfunction searchClient(appId, apiKey, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n return createSearchClient({\n appId,\n apiKey,\n timeouts: {\n connect: DEFAULT_CONNECT_TIMEOUT_BROWSER,\n read: DEFAULT_READ_TIMEOUT_BROWSER,\n write: DEFAULT_WRITE_TIMEOUT_BROWSER\n },\n logger: createNullLogger(),\n requester: createXhrRequester(),\n algoliaAgents: [{ segment: \"Browser\" }],\n authMode: \"WithinQueryParameters\",\n responsesCache: createMemoryCache(),\n requestsCache: createMemoryCache({ serializable: false }),\n hostsCache: createFallbackableCache({\n caches: [createBrowserLocalStorageCache({ key: 
`${apiClientVersion}-${appId}` }), createMemoryCache()]\n }),\n ...options\n });\n}\nexport {\n apiClientVersion,\n searchClient\n};\n//# sourceMappingURL=browser.js.map","function m(){function r(t){return new Promise(s=>{let e=new XMLHttpRequest;e.open(t.method,t.url,!0),Object.keys(t.headers).forEach(n=>e.setRequestHeader(n,t.headers[n]));let i=(n,a)=>setTimeout(()=>{e.abort(),s({status:0,content:a,isTimedOut:!0})},n),u=i(t.connectTimeout,\"Connection timeout\"),o;e.onreadystatechange=()=>{e.readyState>e.OPENED&&o===void 0&&(clearTimeout(u),o=i(t.responseTimeout,\"Socket timeout\"))},e.onerror=()=>{e.status===0&&(clearTimeout(u),clearTimeout(o),s({content:e.responseText||\"Network request failed\",status:e.status,isTimedOut:!1}))},e.onload=()=>{clearTimeout(u),clearTimeout(o),s({content:e.responseText,status:e.status,isTimedOut:!1})},e.send(t.data)})}return{send:r}}export{m as createXhrRequester};\n//# sourceMappingURL=requester.xhr.js.map","// src/cache/createBrowserLocalStorageCache.ts\nfunction createBrowserLocalStorageCache(options) {\n let storage;\n const namespaceKey = `algolia-client-js-${options.key}`;\n function getStorage() {\n if (storage === void 0) {\n storage = options.localStorage || window.localStorage;\n }\n return storage;\n }\n function getNamespace() {\n return JSON.parse(getStorage().getItem(namespaceKey) || \"{}\");\n }\n function setNamespace(namespace) {\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n }\n function removeOutdatedCacheItems() {\n const timeToLive = options.timeToLive ? options.timeToLive * 1e3 : null;\n const namespace = getNamespace();\n const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(\n Object.entries(namespace).filter(([, cacheItem]) => {\n return cacheItem.timestamp !== void 0;\n })\n );\n setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);\n if (!timeToLive) {\n return;\n }\n const filteredNamespaceWithoutExpiredItems = Object.fromEntries(\n Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {\n const currentTimestamp = (/* @__PURE__ */ new Date()).getTime();\n const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;\n return !isExpired;\n })\n );\n setNamespace(filteredNamespaceWithoutExpiredItems);\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return Promise.resolve().then(() => {\n removeOutdatedCacheItems();\n return getNamespace()[JSON.stringify(key)];\n }).then((value) => {\n return Promise.all([value ? 
value.value : defaultValue(), value !== void 0]);\n }).then(([value, exists]) => {\n return Promise.all([value, exists || events.miss(value)]);\n }).then(([value]) => value);\n },\n set(key, value) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n namespace[JSON.stringify(key)] = {\n timestamp: (/* @__PURE__ */ new Date()).getTime(),\n value\n };\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n return value;\n });\n },\n delete(key) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n delete namespace[JSON.stringify(key)];\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n });\n },\n clear() {\n return Promise.resolve().then(() => {\n getStorage().removeItem(namespaceKey);\n });\n }\n };\n}\n\n// src/cache/createNullCache.ts\nfunction createNullCache() {\n return {\n get(_key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const value = defaultValue();\n return value.then((result) => Promise.all([result, events.miss(result)])).then(([result]) => result);\n },\n set(_key, value) {\n return Promise.resolve(value);\n },\n delete(_key) {\n return Promise.resolve();\n },\n clear() {\n return Promise.resolve();\n }\n };\n}\n\n// src/cache/createFallbackableCache.ts\nfunction createFallbackableCache(options) {\n const caches = [...options.caches];\n const current = caches.shift();\n if (current === void 0) {\n return createNullCache();\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return current.get(key, defaultValue, events).catch(() => {\n return createFallbackableCache({ caches }).get(key, defaultValue, events);\n });\n },\n set(key, value) {\n return current.set(key, value).catch(() => {\n return createFallbackableCache({ caches }).set(key, value);\n });\n },\n delete(key) {\n return current.delete(key).catch(() => {\n return createFallbackableCache({ caches }).delete(key);\n });\n },\n clear() {\n return current.clear().catch(() => {\n return createFallbackableCache({ caches }).clear();\n });\n }\n };\n}\n\n// src/cache/createMemoryCache.ts\nfunction createMemoryCache(options = { serializable: true }) {\n let cache = {};\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const keyAsString = JSON.stringify(key);\n if (keyAsString in cache) {\n return Promise.resolve(options.serializable ? JSON.parse(cache[keyAsString]) : cache[keyAsString]);\n }\n const promise = defaultValue();\n return promise.then((value) => events.miss(value)).then(() => promise);\n },\n set(key, value) {\n cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;\n return Promise.resolve(value);\n },\n delete(key) {\n delete cache[JSON.stringify(key)];\n return Promise.resolve();\n },\n clear() {\n cache = {};\n return Promise.resolve();\n }\n };\n}\n\n// src/constants.ts\nvar DEFAULT_CONNECT_TIMEOUT_BROWSER = 1e3;\nvar DEFAULT_READ_TIMEOUT_BROWSER = 2e3;\nvar DEFAULT_WRITE_TIMEOUT_BROWSER = 3e4;\nvar DEFAULT_CONNECT_TIMEOUT_NODE = 2e3;\nvar DEFAULT_READ_TIMEOUT_NODE = 5e3;\nvar DEFAULT_WRITE_TIMEOUT_NODE = 3e4;\n\n// src/createAlgoliaAgent.ts\nfunction createAlgoliaAgent(version) {\n const algoliaAgent = {\n value: `Algolia for JavaScript (${version})`,\n add(options) {\n const addedAlgoliaAgent = `; ${options.segment}${options.version !== void 0 ? 
` (${options.version})` : \"\"}`;\n if (algoliaAgent.value.indexOf(addedAlgoliaAgent) === -1) {\n algoliaAgent.value = `${algoliaAgent.value}${addedAlgoliaAgent}`;\n }\n return algoliaAgent;\n }\n };\n return algoliaAgent;\n}\n\n// src/createAuth.ts\nfunction createAuth(appId, apiKey, authMode = \"WithinHeaders\") {\n const credentials = {\n \"x-algolia-api-key\": apiKey,\n \"x-algolia-application-id\": appId\n };\n return {\n headers() {\n return authMode === \"WithinHeaders\" ? credentials : {};\n },\n queryParameters() {\n return authMode === \"WithinQueryParameters\" ? credentials : {};\n }\n };\n}\n\n// src/createIterablePromise.ts\nfunction createIterablePromise({\n func,\n validate,\n aggregator,\n error,\n timeout = () => 0\n}) {\n const retry = (previousResponse) => {\n return new Promise((resolve, reject) => {\n func(previousResponse).then(async (response) => {\n if (aggregator) {\n await aggregator(response);\n }\n if (await validate(response)) {\n return resolve(response);\n }\n if (error && await error.validate(response)) {\n return reject(new Error(await error.message(response)));\n }\n return setTimeout(\n () => {\n retry(response).then(resolve).catch(reject);\n },\n await timeout()\n );\n }).catch((err) => {\n reject(err);\n });\n });\n };\n return retry();\n}\n\n// src/getAlgoliaAgent.ts\nfunction getAlgoliaAgent({ algoliaAgents, client, version }) {\n const defaultAlgoliaAgent = createAlgoliaAgent(version).add({\n segment: client,\n version\n });\n algoliaAgents.forEach((algoliaAgent) => defaultAlgoliaAgent.add(algoliaAgent));\n return defaultAlgoliaAgent;\n}\n\n// src/logger/createNullLogger.ts\nfunction createNullLogger() {\n return {\n debug(_message, _args) {\n return Promise.resolve();\n },\n info(_message, _args) {\n return Promise.resolve();\n },\n error(_message, _args) {\n return Promise.resolve();\n }\n };\n}\n\n// src/transporter/createStatefulHost.ts\nvar EXPIRATION_DELAY = 2 * 60 * 1e3;\nfunction createStatefulHost(host, status = \"up\") {\n const lastUpdate = Date.now();\n function isUp() {\n return status === \"up\" || Date.now() - lastUpdate > EXPIRATION_DELAY;\n }\n function isTimedOut() {\n return status === \"timed out\" && Date.now() - lastUpdate <= EXPIRATION_DELAY;\n }\n return { ...host, status, lastUpdate, isUp, isTimedOut };\n}\n\n// src/transporter/errors.ts\nvar AlgoliaError = class extends Error {\n name = \"AlgoliaError\";\n constructor(message, name) {\n super(message);\n if (name) {\n this.name = name;\n }\n }\n};\nvar ErrorWithStackTrace = class extends AlgoliaError {\n stackTrace;\n constructor(message, stackTrace, name) {\n super(message, name);\n this.stackTrace = stackTrace;\n }\n};\nvar RetryError = class extends ErrorWithStackTrace {\n constructor(stackTrace) {\n super(\n \"Unreachable hosts - your application id may be incorrect. 
If the error persists, please reach out to the Algolia Support team: https://alg.li/support.\",\n stackTrace,\n \"RetryError\"\n );\n }\n};\nvar ApiError = class extends ErrorWithStackTrace {\n status;\n constructor(message, status, stackTrace, name = \"ApiError\") {\n super(message, stackTrace, name);\n this.status = status;\n }\n};\nvar DeserializationError = class extends AlgoliaError {\n response;\n constructor(message, response) {\n super(message, \"DeserializationError\");\n this.response = response;\n }\n};\nvar DetailedApiError = class extends ApiError {\n error;\n constructor(message, status, error, stackTrace) {\n super(message, status, stackTrace, \"DetailedApiError\");\n this.error = error;\n }\n};\n\n// src/transporter/helpers.ts\nfunction shuffle(array) {\n const shuffledArray = array;\n for (let c = array.length - 1; c > 0; c--) {\n const b = Math.floor(Math.random() * (c + 1));\n const a = array[c];\n shuffledArray[c] = array[b];\n shuffledArray[b] = a;\n }\n return shuffledArray;\n}\nfunction serializeUrl(host, path, queryParameters) {\n const queryParametersAsString = serializeQueryParameters(queryParameters);\n let url = `${host.protocol}://${host.url}${host.port ? `:${host.port}` : \"\"}/${path.charAt(0) === \"/\" ? path.substring(1) : path}`;\n if (queryParametersAsString.length) {\n url += `?${queryParametersAsString}`;\n }\n return url;\n}\nfunction serializeQueryParameters(parameters) {\n return Object.keys(parameters).filter((key) => parameters[key] !== void 0).sort().map(\n (key) => `${key}=${encodeURIComponent(\n Object.prototype.toString.call(parameters[key]) === \"[object Array]\" ? parameters[key].join(\",\") : parameters[key]\n ).replace(/\\+/g, \"%20\")}`\n ).join(\"&\");\n}\nfunction serializeData(request, requestOptions) {\n if (request.method === \"GET\" || request.data === void 0 && requestOptions.data === void 0) {\n return void 0;\n }\n const data = Array.isArray(request.data) ? 
request.data : { ...request.data, ...requestOptions.data };\n return JSON.stringify(data);\n}\nfunction serializeHeaders(baseHeaders, requestHeaders, requestOptionsHeaders) {\n const headers = {\n Accept: \"application/json\",\n ...baseHeaders,\n ...requestHeaders,\n ...requestOptionsHeaders\n };\n const serializedHeaders = {};\n Object.keys(headers).forEach((header) => {\n const value = headers[header];\n serializedHeaders[header.toLowerCase()] = value;\n });\n return serializedHeaders;\n}\nfunction deserializeSuccess(response) {\n try {\n return JSON.parse(response.content);\n } catch (e) {\n throw new DeserializationError(e.message, response);\n }\n}\nfunction deserializeFailure({ content, status }, stackFrame) {\n try {\n const parsed = JSON.parse(content);\n if (\"error\" in parsed) {\n return new DetailedApiError(parsed.message, status, parsed.error, stackFrame);\n }\n return new ApiError(parsed.message, status, stackFrame);\n } catch {\n }\n return new ApiError(content, status, stackFrame);\n}\n\n// src/transporter/responses.ts\nfunction isNetworkError({ isTimedOut, status }) {\n return !isTimedOut && ~~status === 0;\n}\nfunction isRetryable({ isTimedOut, status }) {\n return isTimedOut || isNetworkError({ isTimedOut, status }) || ~~(status / 100) !== 2 && ~~(status / 100) !== 4;\n}\nfunction isSuccess({ status }) {\n return ~~(status / 100) === 2;\n}\n\n// src/transporter/stackTrace.ts\nfunction stackTraceWithoutCredentials(stackTrace) {\n return stackTrace.map((stackFrame) => stackFrameWithoutCredentials(stackFrame));\n}\nfunction stackFrameWithoutCredentials(stackFrame) {\n const modifiedHeaders = stackFrame.request.headers[\"x-algolia-api-key\"] ? { \"x-algolia-api-key\": \"*****\" } : {};\n return {\n ...stackFrame,\n request: {\n ...stackFrame.request,\n headers: {\n ...stackFrame.request.headers,\n ...modifiedHeaders\n }\n }\n };\n}\n\n// src/transporter/createTransporter.ts\nfunction createTransporter({\n hosts,\n hostsCache,\n baseHeaders,\n logger,\n baseQueryParameters,\n algoliaAgent,\n timeouts,\n requester,\n requestsCache,\n responsesCache\n}) {\n async function createRetryableOptions(compatibleHosts) {\n const statefulHosts = await Promise.all(\n compatibleHosts.map((compatibleHost) => {\n return hostsCache.get(compatibleHost, () => {\n return Promise.resolve(createStatefulHost(compatibleHost));\n });\n })\n );\n const hostsUp = statefulHosts.filter((host) => host.isUp());\n const hostsTimedOut = statefulHosts.filter((host) => host.isTimedOut());\n const hostsAvailable = [...hostsUp, ...hostsTimedOut];\n const compatibleHostsAvailable = hostsAvailable.length > 0 ? hostsAvailable : compatibleHosts;\n return {\n hosts: compatibleHostsAvailable,\n getTimeout(timeoutsCount, baseTimeout) {\n const timeoutMultiplier = hostsTimedOut.length === 0 && timeoutsCount === 0 ? 1 : hostsTimedOut.length + 3 + timeoutsCount;\n return timeoutMultiplier * baseTimeout;\n }\n };\n }\n async function retryableRequest(request, requestOptions, isRead = true) {\n const stackTrace = [];\n const data = serializeData(request, requestOptions);\n const headers = serializeHeaders(baseHeaders, request.headers, requestOptions.headers);\n const dataQueryParameters = request.method === \"GET\" ? 
{\n ...request.data,\n ...requestOptions.data\n } : {};\n const queryParameters = {\n ...baseQueryParameters,\n ...request.queryParameters,\n ...dataQueryParameters\n };\n if (algoliaAgent.value) {\n queryParameters[\"x-algolia-agent\"] = algoliaAgent.value;\n }\n if (requestOptions && requestOptions.queryParameters) {\n for (const key of Object.keys(requestOptions.queryParameters)) {\n if (!requestOptions.queryParameters[key] || Object.prototype.toString.call(requestOptions.queryParameters[key]) === \"[object Object]\") {\n queryParameters[key] = requestOptions.queryParameters[key];\n } else {\n queryParameters[key] = requestOptions.queryParameters[key].toString();\n }\n }\n }\n let timeoutsCount = 0;\n const retry = async (retryableHosts, getTimeout) => {\n const host = retryableHosts.pop();\n if (host === void 0) {\n throw new RetryError(stackTraceWithoutCredentials(stackTrace));\n }\n const timeout = { ...timeouts, ...requestOptions.timeouts };\n const payload = {\n data,\n headers,\n method: request.method,\n url: serializeUrl(host, request.path, queryParameters),\n connectTimeout: getTimeout(timeoutsCount, timeout.connect),\n responseTimeout: getTimeout(timeoutsCount, isRead ? timeout.read : timeout.write)\n };\n const pushToStackTrace = (response2) => {\n const stackFrame = {\n request: payload,\n response: response2,\n host,\n triesLeft: retryableHosts.length\n };\n stackTrace.push(stackFrame);\n return stackFrame;\n };\n const response = await requester.send(payload);\n if (isRetryable(response)) {\n const stackFrame = pushToStackTrace(response);\n if (response.isTimedOut) {\n timeoutsCount++;\n }\n logger.info(\"Retryable failure\", stackFrameWithoutCredentials(stackFrame));\n await hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? \"timed out\" : \"down\"));\n return retry(retryableHosts, getTimeout);\n }\n if (isSuccess(response)) {\n return deserializeSuccess(response);\n }\n pushToStackTrace(response);\n throw deserializeFailure(response, stackTrace);\n };\n const compatibleHosts = hosts.filter(\n (host) => host.accept === \"readWrite\" || (isRead ? 
host.accept === \"read\" : host.accept === \"write\")\n );\n const options = await createRetryableOptions(compatibleHosts);\n return retry([...options.hosts].reverse(), options.getTimeout);\n }\n function createRequest(request, requestOptions = {}) {\n const isRead = request.useReadTransporter || request.method === \"GET\";\n if (!isRead) {\n return retryableRequest(request, requestOptions, isRead);\n }\n const createRetryableRequest = () => {\n return retryableRequest(request, requestOptions);\n };\n const cacheable = requestOptions.cacheable || request.cacheable;\n if (cacheable !== true) {\n return createRetryableRequest();\n }\n const key = {\n request,\n requestOptions,\n transporter: {\n queryParameters: baseQueryParameters,\n headers: baseHeaders\n }\n };\n return responsesCache.get(\n key,\n () => {\n return requestsCache.get(\n key,\n () => (\n /**\n * Finally, if there is no request in progress with the same key,\n * this `createRetryableRequest()` will actually trigger the\n * retryable request.\n */\n requestsCache.set(key, createRetryableRequest()).then(\n (response) => Promise.all([requestsCache.delete(key), response]),\n (err) => Promise.all([requestsCache.delete(key), Promise.reject(err)])\n ).then(([_, response]) => response)\n )\n );\n },\n {\n /**\n * Of course, once we get this response back from the server, we\n * tell response cache to actually store the received response\n * to be used later.\n */\n miss: (response) => responsesCache.set(key, response)\n }\n );\n }\n return {\n hostsCache,\n requester,\n timeouts,\n logger,\n algoliaAgent,\n baseHeaders,\n baseQueryParameters,\n hosts,\n request: createRequest,\n requestsCache,\n responsesCache\n };\n}\n\n// src/types/logger.ts\nvar LogLevelEnum = {\n Debug: 1,\n Info: 2,\n Error: 3\n};\nexport {\n AlgoliaError,\n ApiError,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_CONNECT_TIMEOUT_NODE,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_NODE,\n DEFAULT_WRITE_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_NODE,\n DeserializationError,\n DetailedApiError,\n ErrorWithStackTrace,\n LogLevelEnum,\n RetryError,\n createAlgoliaAgent,\n createAuth,\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createIterablePromise,\n createMemoryCache,\n createNullCache,\n createNullLogger,\n createStatefulHost,\n createTransporter,\n deserializeFailure,\n deserializeSuccess,\n getAlgoliaAgent,\n isNetworkError,\n isRetryable,\n isSuccess,\n serializeData,\n serializeHeaders,\n serializeQueryParameters,\n serializeUrl,\n shuffle,\n stackFrameWithoutCredentials,\n stackTraceWithoutCredentials\n};\n//# sourceMappingURL=common.js.map","// builds/browser.ts\nimport { createXhrRequester } from \"@algolia/requester-browser-xhr\";\nimport {\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createMemoryCache,\n createNullLogger\n} from \"@algolia/client-common\";\n\n// src/ingestionClient.ts\nimport { createAuth, createTransporter, getAlgoliaAgent } from \"@algolia/client-common\";\nvar apiClientVersion = \"1.19.0\";\nvar REGIONS = [\"eu\", \"us\"];\nfunction getDefaultHosts(region) {\n const url = \"data.{region}.algolia.com\".replace(\"{region}\", region);\n return [{ url, accept: \"readWrite\", protocol: \"https\" }];\n}\nfunction isOnDemandTrigger(trigger) {\n return trigger.type === \"onDemand\";\n}\nfunction isScheduleTrigger(trigger) {\n return trigger.type === \"schedule\";\n}\nfunction isSubscriptionTrigger(trigger) {\n return trigger.type === \"subscription\";\n}\nfunction createIngestionClient({\n appId: 
appIdOption,\n apiKey: apiKeyOption,\n authMode,\n algoliaAgents,\n region: regionOption,\n ...options\n}) {\n const auth = createAuth(appIdOption, apiKeyOption, authMode);\n const transporter = createTransporter({\n hosts: getDefaultHosts(regionOption),\n ...options,\n algoliaAgent: getAlgoliaAgent({\n algoliaAgents,\n client: \"Ingestion\",\n version: apiClientVersion\n }),\n baseHeaders: {\n \"content-type\": \"text/plain\",\n ...auth.headers(),\n ...options.baseHeaders\n },\n baseQueryParameters: {\n ...auth.queryParameters(),\n ...options.baseQueryParameters\n }\n });\n return {\n transporter,\n /**\n * The `appId` currently in use.\n */\n appId: appIdOption,\n /**\n * The `apiKey` currently in use.\n */\n apiKey: apiKeyOption,\n /**\n * Clears the cache of the transporter for the `requestsCache` and `responsesCache` properties.\n */\n clearCache() {\n return Promise.all([transporter.requestsCache.clear(), transporter.responsesCache.clear()]).then(() => void 0);\n },\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return transporter.algoliaAgent.value;\n },\n /**\n * Adds a `segment` to the `x-algolia-agent` sent with every requests.\n *\n * @param segment - The algolia agent (user-agent) segment to add.\n * @param version - The version of the agent.\n */\n addAlgoliaAgent(segment, version) {\n transporter.algoliaAgent.add({ segment, version });\n },\n /**\n * Helper method to switch the API key used to authenticate the requests.\n *\n * @param params - Method params.\n * @param params.apiKey - The new API Key to use.\n */\n setClientApiKey({ apiKey }) {\n if (!authMode || authMode === \"WithinHeaders\") {\n transporter.baseHeaders[\"x-algolia-api-key\"] = apiKey;\n } else {\n transporter.baseQueryParameters[\"x-algolia-api-key\"] = apiKey;\n }\n },\n /**\n * Creates a new authentication resource.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param authenticationCreate -\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n createAuthentication(authenticationCreate, requestOptions) {\n if (!authenticationCreate) {\n throw new Error(\"Parameter `authenticationCreate` is required when calling `createAuthentication`.\");\n }\n if (!authenticationCreate.type) {\n throw new Error(\"Parameter `authenticationCreate.type` is required when calling `createAuthentication`.\");\n }\n if (!authenticationCreate.name) {\n throw new Error(\"Parameter `authenticationCreate.name` is required when calling `createAuthentication`.\");\n }\n if (!authenticationCreate.input) {\n throw new Error(\"Parameter `authenticationCreate.input` is required when calling `createAuthentication`.\");\n }\n const requestPath = \"/1/authentications\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: authenticationCreate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Creates a new destination.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param destinationCreate -\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n createDestination(destinationCreate, requestOptions) {\n if (!destinationCreate) {\n throw new Error(\"Parameter `destinationCreate` is required when 
calling `createDestination`.\");\n }\n if (!destinationCreate.type) {\n throw new Error(\"Parameter `destinationCreate.type` is required when calling `createDestination`.\");\n }\n if (!destinationCreate.name) {\n throw new Error(\"Parameter `destinationCreate.name` is required when calling `createDestination`.\");\n }\n if (!destinationCreate.input) {\n throw new Error(\"Parameter `destinationCreate.input` is required when calling `createDestination`.\");\n }\n const requestPath = \"/1/destinations\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: destinationCreate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Creates a new source.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param sourceCreate -\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n createSource(sourceCreate, requestOptions) {\n if (!sourceCreate) {\n throw new Error(\"Parameter `sourceCreate` is required when calling `createSource`.\");\n }\n if (!sourceCreate.type) {\n throw new Error(\"Parameter `sourceCreate.type` is required when calling `createSource`.\");\n }\n if (!sourceCreate.name) {\n throw new Error(\"Parameter `sourceCreate.name` is required when calling `createSource`.\");\n }\n const requestPath = \"/1/sources\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: sourceCreate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Creates a new task.\n * @param taskCreate - Request body for creating a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n createTask(taskCreate, requestOptions) {\n if (!taskCreate) {\n throw new Error(\"Parameter `taskCreate` is required when calling `createTask`.\");\n }\n if (!taskCreate.sourceID) {\n throw new Error(\"Parameter `taskCreate.sourceID` is required when calling `createTask`.\");\n }\n if (!taskCreate.destinationID) {\n throw new Error(\"Parameter `taskCreate.destinationID` is required when calling `createTask`.\");\n }\n if (!taskCreate.action) {\n throw new Error(\"Parameter `taskCreate.action` is required when calling `createTask`.\");\n }\n const requestPath = \"/2/tasks\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: taskCreate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Creates a new task using the v1 endpoint, please use `createTask` instead.\n * @param taskCreate - Request body for creating a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n createTaskV1(taskCreate, requestOptions) {\n if (!taskCreate) {\n throw new Error(\"Parameter `taskCreate` is required when calling `createTaskV1`.\");\n }\n if (!taskCreate.sourceID) {\n throw new Error(\"Parameter `taskCreate.sourceID` is required when calling `createTaskV1`.\");\n }\n if (!taskCreate.destinationID) {\n throw new Error(\"Parameter `taskCreate.destinationID` is required when calling `createTaskV1`.\");\n }\n if (!taskCreate.trigger) {\n throw new Error(\"Parameter `taskCreate.trigger` is required when 
calling `createTaskV1`.\");\n }\n if (!taskCreate.action) {\n throw new Error(\"Parameter `taskCreate.action` is required when calling `createTaskV1`.\");\n }\n const requestPath = \"/1/tasks\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: taskCreate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Creates a new transformation.\n * @param transformationCreate - Request body for creating a transformation.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n createTransformation(transformationCreate, requestOptions) {\n if (!transformationCreate) {\n throw new Error(\"Parameter `transformationCreate` is required when calling `createTransformation`.\");\n }\n if (!transformationCreate.code) {\n throw new Error(\"Parameter `transformationCreate.code` is required when calling `createTransformation`.\");\n }\n if (!transformationCreate.name) {\n throw new Error(\"Parameter `transformationCreate.name` is required when calling `createTransformation`.\");\n }\n const requestPath = \"/1/transformations\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: transformationCreate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customDelete - The customDelete object.\n * @param customDelete.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customDelete.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customDelete({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customDelete`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customGet - The customGet object.\n * @param customGet.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customGet.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customGet({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customGet`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? 
parameters : {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPost - The customPost object.\n * @param customPost.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPost.parameters - Query parameters to apply to the current query.\n * @param customPost.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPost({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPost`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPut - The customPut object.\n * @param customPut.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPut.parameters - Query parameters to apply to the current query.\n * @param customPut.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPut({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPut`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes an authentication resource. You can\\'t delete authentication resources that are used by a source or a destination.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param deleteAuthentication - The deleteAuthentication object.\n * @param deleteAuthentication.authenticationID - Unique identifier of an authentication resource.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteAuthentication({ authenticationID }, requestOptions) {\n if (!authenticationID) {\n throw new Error(\"Parameter `authenticationID` is required when calling `deleteAuthentication`.\");\n }\n const requestPath = \"/1/authentications/{authenticationID}\".replace(\n \"{authenticationID}\",\n encodeURIComponent(authenticationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a destination by its ID. 
You can\\'t delete destinations that are referenced in tasks.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param deleteDestination - The deleteDestination object.\n * @param deleteDestination.destinationID - Unique identifier of a destination.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteDestination({ destinationID }, requestOptions) {\n if (!destinationID) {\n throw new Error(\"Parameter `destinationID` is required when calling `deleteDestination`.\");\n }\n const requestPath = \"/1/destinations/{destinationID}\".replace(\n \"{destinationID}\",\n encodeURIComponent(destinationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a source by its ID. You can\\'t delete sources that are referenced in tasks.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param deleteSource - The deleteSource object.\n * @param deleteSource.sourceID - Unique identifier of a source.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteSource({ sourceID }, requestOptions) {\n if (!sourceID) {\n throw new Error(\"Parameter `sourceID` is required when calling `deleteSource`.\");\n }\n const requestPath = \"/1/sources/{sourceID}\".replace(\"{sourceID}\", encodeURIComponent(sourceID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a task by its ID.\n * @param deleteTask - The deleteTask object.\n * @param deleteTask.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteTask({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `deleteTask`.\");\n }\n const requestPath = \"/2/tasks/{taskID}\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a task by its ID using the v1 endpoint, please use `deleteTask` instead.\n * @param deleteTaskV1 - The deleteTaskV1 object.\n * @param deleteTaskV1.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteTaskV1({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `deleteTaskV1`.\");\n }\n const requestPath = \"/1/tasks/{taskID}\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a transformation by its ID.\n * @param deleteTransformation - The deleteTransformation object.\n * @param deleteTransformation.transformationID - Unique 
identifier of a transformation.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteTransformation({ transformationID }, requestOptions) {\n if (!transformationID) {\n throw new Error(\"Parameter `transformationID` is required when calling `deleteTransformation`.\");\n }\n const requestPath = \"/1/transformations/{transformationID}\".replace(\n \"{transformationID}\",\n encodeURIComponent(transformationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Disables a task.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param disableTask - The disableTask object.\n * @param disableTask.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n disableTask({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `disableTask`.\");\n }\n const requestPath = \"/2/tasks/{taskID}/disable\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Disables a task using the v1 endpoint, please use `disableTask` instead.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param disableTaskV1 - The disableTaskV1 object.\n * @param disableTaskV1.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n disableTaskV1({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `disableTaskV1`.\");\n }\n const requestPath = \"/1/tasks/{taskID}/disable\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Enables a task.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param enableTask - The enableTask object.\n * @param enableTask.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n enableTask({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `enableTask`.\");\n }\n const requestPath = \"/2/tasks/{taskID}/enable\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Enables a task using the v1 endpoint, please use `enableTask` instead.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param enableTaskV1 - The enableTaskV1 object.\n * @param enableTaskV1.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions 
to send along with the query, they will be merged with the transporter requestOptions.\n */\n enableTaskV1({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `enableTaskV1`.\");\n }\n const requestPath = \"/1/tasks/{taskID}/enable\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves an authentication resource by its ID.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param getAuthentication - The getAuthentication object.\n * @param getAuthentication.authenticationID - Unique identifier of an authentication resource.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getAuthentication({ authenticationID }, requestOptions) {\n if (!authenticationID) {\n throw new Error(\"Parameter `authenticationID` is required when calling `getAuthentication`.\");\n }\n const requestPath = \"/1/authentications/{authenticationID}\".replace(\n \"{authenticationID}\",\n encodeURIComponent(authenticationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a destination by its ID.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param getDestination - The getDestination object.\n * @param getDestination.destinationID - Unique identifier of a destination.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getDestination({ destinationID }, requestOptions) {\n if (!destinationID) {\n throw new Error(\"Parameter `destinationID` is required when calling `getDestination`.\");\n }\n const requestPath = \"/1/destinations/{destinationID}\".replace(\n \"{destinationID}\",\n encodeURIComponent(destinationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a single task run event by its ID.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param getEvent - The getEvent object.\n * @param getEvent.runID - Unique identifier of a task run.\n * @param getEvent.eventID - Unique identifier of an event.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getEvent({ runID, eventID }, requestOptions) {\n if (!runID) {\n throw new Error(\"Parameter `runID` is required when calling `getEvent`.\");\n }\n if (!eventID) {\n throw new Error(\"Parameter `eventID` is required when calling `getEvent`.\");\n }\n const requestPath = \"/1/runs/{runID}/events/{eventID}\".replace(\"{runID}\", encodeURIComponent(runID)).replace(\"{eventID}\", encodeURIComponent(eventID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieve a single task run by its ID.\n *\n 
* Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param getRun - The getRun object.\n * @param getRun.runID - Unique identifier of a task run.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getRun({ runID }, requestOptions) {\n if (!runID) {\n throw new Error(\"Parameter `runID` is required when calling `getRun`.\");\n }\n const requestPath = \"/1/runs/{runID}\".replace(\"{runID}\", encodeURIComponent(runID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieve a source by its ID.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param getSource - The getSource object.\n * @param getSource.sourceID - Unique identifier of a source.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getSource({ sourceID }, requestOptions) {\n if (!sourceID) {\n throw new Error(\"Parameter `sourceID` is required when calling `getSource`.\");\n }\n const requestPath = \"/1/sources/{sourceID}\".replace(\"{sourceID}\", encodeURIComponent(sourceID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a task by its ID.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param getTask - The getTask object.\n * @param getTask.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTask({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `getTask`.\");\n }\n const requestPath = \"/2/tasks/{taskID}\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a task by its ID using the v1 endpoint, please use `getTask` instead.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param getTaskV1 - The getTaskV1 object.\n * @param getTaskV1.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTaskV1({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `getTaskV1`.\");\n }\n const requestPath = \"/1/tasks/{taskID}\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a transformation by its ID.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param getTransformation - The getTransformation object.\n * @param getTransformation.transformationID - Unique identifier of a transformation.\n * @param requestOptions - The 
requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getTransformation({ transformationID }, requestOptions) {\n if (!transformationID) {\n throw new Error(\"Parameter `transformationID` is required when calling `getTransformation`.\");\n }\n const requestPath = \"/1/transformations/{transformationID}\".replace(\n \"{transformationID}\",\n encodeURIComponent(transformationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a list of all authentication resources.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param listAuthentications - The listAuthentications object.\n * @param listAuthentications.itemsPerPage - Number of items per page.\n * @param listAuthentications.page - Page number of the paginated API response.\n * @param listAuthentications.type - Type of authentication resource to retrieve.\n * @param listAuthentications.platform - Ecommerce platform for which to retrieve authentications.\n * @param listAuthentications.sort - Property by which to sort the list of authentications.\n * @param listAuthentications.order - Sort order of the response, ascending or descending.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listAuthentications({ itemsPerPage, page, type, platform, sort, order } = {}, requestOptions = void 0) {\n const requestPath = \"/1/authentications\";\n const headers = {};\n const queryParameters = {};\n if (itemsPerPage !== void 0) {\n queryParameters[\"itemsPerPage\"] = itemsPerPage.toString();\n }\n if (page !== void 0) {\n queryParameters[\"page\"] = page.toString();\n }\n if (type !== void 0) {\n queryParameters[\"type\"] = type.toString();\n }\n if (platform !== void 0) {\n queryParameters[\"platform\"] = platform.toString();\n }\n if (sort !== void 0) {\n queryParameters[\"sort\"] = sort.toString();\n }\n if (order !== void 0) {\n queryParameters[\"order\"] = order.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a list of destinations.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param listDestinations - The listDestinations object.\n * @param listDestinations.itemsPerPage - Number of items per page.\n * @param listDestinations.page - Page number of the paginated API response.\n * @param listDestinations.type - Destination type.\n * @param listDestinations.authenticationID - Authentication ID used by destinations.\n * @param listDestinations.transformationID - Get the list of destinations used by a transformation.\n * @param listDestinations.sort - Property by which to sort the destinations.\n * @param listDestinations.order - Sort order of the response, ascending or descending.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listDestinations({ itemsPerPage, page, type, authenticationID, transformationID, sort, order } = {}, requestOptions = void 0) {\n const requestPath = \"/1/destinations\";\n const headers = {};\n const queryParameters = {};\n if (itemsPerPage !== void 0) {\n queryParameters[\"itemsPerPage\"] = 
itemsPerPage.toString();\n }\n if (page !== void 0) {\n queryParameters[\"page\"] = page.toString();\n }\n if (type !== void 0) {\n queryParameters[\"type\"] = type.toString();\n }\n if (authenticationID !== void 0) {\n queryParameters[\"authenticationID\"] = authenticationID.toString();\n }\n if (transformationID !== void 0) {\n queryParameters[\"transformationID\"] = transformationID.toString();\n }\n if (sort !== void 0) {\n queryParameters[\"sort\"] = sort.toString();\n }\n if (order !== void 0) {\n queryParameters[\"order\"] = order.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a list of events for a task run, identified by its ID.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param listEvents - The listEvents object.\n * @param listEvents.runID - Unique identifier of a task run.\n * @param listEvents.itemsPerPage - Number of items per page.\n * @param listEvents.page - Page number of the paginated API response.\n * @param listEvents.status - Event status for filtering the list of task runs.\n * @param listEvents.type - Event type for filtering the list of task runs.\n * @param listEvents.sort - Property by which to sort the list of task run events.\n * @param listEvents.order - Sort order of the response, ascending or descending.\n * @param listEvents.startDate - Date and time in RFC 3339 format for the earliest events to retrieve. By default, the current time minus three hours is used.\n * @param listEvents.endDate - Date and time in RFC 3339 format for the latest events to retrieve. By default, the current time is used.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listEvents({ runID, itemsPerPage, page, status, type, sort, order, startDate, endDate }, requestOptions) {\n if (!runID) {\n throw new Error(\"Parameter `runID` is required when calling `listEvents`.\");\n }\n const requestPath = \"/1/runs/{runID}/events\".replace(\"{runID}\", encodeURIComponent(runID));\n const headers = {};\n const queryParameters = {};\n if (itemsPerPage !== void 0) {\n queryParameters[\"itemsPerPage\"] = itemsPerPage.toString();\n }\n if (page !== void 0) {\n queryParameters[\"page\"] = page.toString();\n }\n if (status !== void 0) {\n queryParameters[\"status\"] = status.toString();\n }\n if (type !== void 0) {\n queryParameters[\"type\"] = type.toString();\n }\n if (sort !== void 0) {\n queryParameters[\"sort\"] = sort.toString();\n }\n if (order !== void 0) {\n queryParameters[\"order\"] = order.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieve a list of task runs.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param listRuns - The listRuns object.\n * @param listRuns.itemsPerPage - Number of items per page.\n * @param listRuns.page - Page number of the paginated API response.\n * @param listRuns.status - Run status for filtering the list of task runs.\n * @param listRuns.type - Run type for filtering the list of task runs.\n * @param listRuns.taskID - Task ID for 
filtering the list of task runs.\n * @param listRuns.sort - Property by which to sort the list of task runs.\n * @param listRuns.order - Sort order of the response, ascending or descending.\n * @param listRuns.startDate - Date in RFC 3339 format for the earliest run to retrieve. By default, the current day minus seven days is used.\n * @param listRuns.endDate - Date in RFC 3339 format for the latest run to retrieve. By default, the current day is used.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listRuns({ itemsPerPage, page, status, type, taskID, sort, order, startDate, endDate } = {}, requestOptions = void 0) {\n const requestPath = \"/1/runs\";\n const headers = {};\n const queryParameters = {};\n if (itemsPerPage !== void 0) {\n queryParameters[\"itemsPerPage\"] = itemsPerPage.toString();\n }\n if (page !== void 0) {\n queryParameters[\"page\"] = page.toString();\n }\n if (status !== void 0) {\n queryParameters[\"status\"] = status.toString();\n }\n if (type !== void 0) {\n queryParameters[\"type\"] = type.toString();\n }\n if (taskID !== void 0) {\n queryParameters[\"taskID\"] = taskID.toString();\n }\n if (sort !== void 0) {\n queryParameters[\"sort\"] = sort.toString();\n }\n if (order !== void 0) {\n queryParameters[\"order\"] = order.toString();\n }\n if (startDate !== void 0) {\n queryParameters[\"startDate\"] = startDate.toString();\n }\n if (endDate !== void 0) {\n queryParameters[\"endDate\"] = endDate.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a list of sources.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param listSources - The listSources object.\n * @param listSources.itemsPerPage - Number of items per page.\n * @param listSources.page - Page number of the paginated API response.\n * @param listSources.type - Source type. Some sources require authentication.\n * @param listSources.authenticationID - Authentication IDs of the sources to retrieve. 
\\'none\\' returns sources that doesn\\'t have an authentication.\n * @param listSources.sort - Property by which to sort the list of sources.\n * @param listSources.order - Sort order of the response, ascending or descending.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listSources({ itemsPerPage, page, type, authenticationID, sort, order } = {}, requestOptions = void 0) {\n const requestPath = \"/1/sources\";\n const headers = {};\n const queryParameters = {};\n if (itemsPerPage !== void 0) {\n queryParameters[\"itemsPerPage\"] = itemsPerPage.toString();\n }\n if (page !== void 0) {\n queryParameters[\"page\"] = page.toString();\n }\n if (type !== void 0) {\n queryParameters[\"type\"] = type.toString();\n }\n if (authenticationID !== void 0) {\n queryParameters[\"authenticationID\"] = authenticationID.toString();\n }\n if (sort !== void 0) {\n queryParameters[\"sort\"] = sort.toString();\n }\n if (order !== void 0) {\n queryParameters[\"order\"] = order.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a list of tasks.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param listTasks - The listTasks object.\n * @param listTasks.itemsPerPage - Number of items per page.\n * @param listTasks.page - Page number of the paginated API response.\n * @param listTasks.action - Actions for filtering the list of tasks.\n * @param listTasks.enabled - Whether to filter the list of tasks by the `enabled` status.\n * @param listTasks.sourceID - Source IDs for filtering the list of tasks.\n * @param listTasks.sourceType - Filters the tasks with the specified source type.\n * @param listTasks.destinationID - Destination IDs for filtering the list of tasks.\n * @param listTasks.triggerType - Type of task trigger for filtering the list of tasks.\n * @param listTasks.withEmailNotifications - If specified, the response only includes tasks with notifications.email.enabled set to this value.\n * @param listTasks.sort - Property by which to sort the list of tasks.\n * @param listTasks.order - Sort order of the response, ascending or descending.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listTasks({\n itemsPerPage,\n page,\n action,\n enabled,\n sourceID,\n sourceType,\n destinationID,\n triggerType,\n withEmailNotifications,\n sort,\n order\n } = {}, requestOptions = void 0) {\n const requestPath = \"/2/tasks\";\n const headers = {};\n const queryParameters = {};\n if (itemsPerPage !== void 0) {\n queryParameters[\"itemsPerPage\"] = itemsPerPage.toString();\n }\n if (page !== void 0) {\n queryParameters[\"page\"] = page.toString();\n }\n if (action !== void 0) {\n queryParameters[\"action\"] = action.toString();\n }\n if (enabled !== void 0) {\n queryParameters[\"enabled\"] = enabled.toString();\n }\n if (sourceID !== void 0) {\n queryParameters[\"sourceID\"] = sourceID.toString();\n }\n if (sourceType !== void 0) {\n queryParameters[\"sourceType\"] = sourceType.toString();\n }\n if (destinationID !== void 0) {\n queryParameters[\"destinationID\"] = destinationID.toString();\n }\n if (triggerType !== void 0) {\n queryParameters[\"triggerType\"] = triggerType.toString();\n }\n if (withEmailNotifications !== void 0) {\n 
queryParameters[\"withEmailNotifications\"] = withEmailNotifications.toString();\n }\n if (sort !== void 0) {\n queryParameters[\"sort\"] = sort.toString();\n }\n if (order !== void 0) {\n queryParameters[\"order\"] = order.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a list of tasks using the v1 endpoint, please use `getTasks` instead.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param listTasksV1 - The listTasksV1 object.\n * @param listTasksV1.itemsPerPage - Number of items per page.\n * @param listTasksV1.page - Page number of the paginated API response.\n * @param listTasksV1.action - Actions for filtering the list of tasks.\n * @param listTasksV1.enabled - Whether to filter the list of tasks by the `enabled` status.\n * @param listTasksV1.sourceID - Source IDs for filtering the list of tasks.\n * @param listTasksV1.destinationID - Destination IDs for filtering the list of tasks.\n * @param listTasksV1.triggerType - Type of task trigger for filtering the list of tasks.\n * @param listTasksV1.sort - Property by which to sort the list of tasks.\n * @param listTasksV1.order - Sort order of the response, ascending or descending.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listTasksV1({ itemsPerPage, page, action, enabled, sourceID, destinationID, triggerType, sort, order } = {}, requestOptions = void 0) {\n const requestPath = \"/1/tasks\";\n const headers = {};\n const queryParameters = {};\n if (itemsPerPage !== void 0) {\n queryParameters[\"itemsPerPage\"] = itemsPerPage.toString();\n }\n if (page !== void 0) {\n queryParameters[\"page\"] = page.toString();\n }\n if (action !== void 0) {\n queryParameters[\"action\"] = action.toString();\n }\n if (enabled !== void 0) {\n queryParameters[\"enabled\"] = enabled.toString();\n }\n if (sourceID !== void 0) {\n queryParameters[\"sourceID\"] = sourceID.toString();\n }\n if (destinationID !== void 0) {\n queryParameters[\"destinationID\"] = destinationID.toString();\n }\n if (triggerType !== void 0) {\n queryParameters[\"triggerType\"] = triggerType.toString();\n }\n if (sort !== void 0) {\n queryParameters[\"sort\"] = sort.toString();\n }\n if (order !== void 0) {\n queryParameters[\"order\"] = order.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a list of transformations.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param listTransformations - The listTransformations object.\n * @param listTransformations.itemsPerPage - Number of items per page.\n * @param listTransformations.page - Page number of the paginated API response.\n * @param listTransformations.sort - Property by which to sort the list of transformations.\n * @param listTransformations.order - Sort order of the response, ascending or descending.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n listTransformations({ itemsPerPage, page, sort, order } = {}, requestOptions = void 0) {\n const requestPath = \"/1/transformations\";\n const headers = {};\n const queryParameters = {};\n if (itemsPerPage !== void 0) {\n 
queryParameters[\"itemsPerPage\"] = itemsPerPage.toString();\n }\n if (page !== void 0) {\n queryParameters[\"page\"] = page.toString();\n }\n if (sort !== void 0) {\n queryParameters[\"sort\"] = sort.toString();\n }\n if (order !== void 0) {\n queryParameters[\"order\"] = order.toString();\n }\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Push a `batch` request payload through the Pipeline. You can check the status of task pushes with the observability endpoints.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param pushTask - The pushTask object.\n * @param pushTask.taskID - Unique identifier of a task.\n * @param pushTask.pushTaskPayload - Request body of a Search API `batch` request that will be pushed in the Connectors pipeline.\n * @param pushTask.watch - When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n pushTask({ taskID, pushTaskPayload, watch }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `pushTask`.\");\n }\n if (!pushTaskPayload) {\n throw new Error(\"Parameter `pushTaskPayload` is required when calling `pushTask`.\");\n }\n if (!pushTaskPayload.action) {\n throw new Error(\"Parameter `pushTaskPayload.action` is required when calling `pushTask`.\");\n }\n if (!pushTaskPayload.records) {\n throw new Error(\"Parameter `pushTaskPayload.records` is required when calling `pushTask`.\");\n }\n const requestPath = \"/2/tasks/{taskID}/push\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n if (watch !== void 0) {\n queryParameters[\"watch\"] = watch.toString();\n }\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: pushTaskPayload\n };\n requestOptions = {\n timeouts: {\n connect: 18e4,\n read: 18e4,\n write: 18e4,\n ...requestOptions?.timeouts\n }\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Runs all tasks linked to a source, only available for Shopify sources. It will create 1 run per task.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param runSource - The runSource object.\n * @param runSource.sourceID - Unique identifier of a source.\n * @param runSource.runSourcePayload -\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n runSource({ sourceID, runSourcePayload }, requestOptions) {\n if (!sourceID) {\n throw new Error(\"Parameter `sourceID` is required when calling `runSource`.\");\n }\n const requestPath = \"/1/sources/{sourceID}/run\".replace(\"{sourceID}\", encodeURIComponent(sourceID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: runSourcePayload ? runSourcePayload : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Runs a task. 
You can check the status of task runs with the observability endpoints.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param runTask - The runTask object.\n * @param runTask.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n runTask({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `runTask`.\");\n }\n const requestPath = \"/2/tasks/{taskID}/run\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Runs a task using the v1 endpoint, please use `runTask` instead. You can check the status of task runs with the observability endpoints.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param runTaskV1 - The runTaskV1 object.\n * @param runTaskV1.taskID - Unique identifier of a task.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n runTaskV1({ taskID }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `runTaskV1`.\");\n }\n const requestPath = \"/1/tasks/{taskID}/run\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for authentication resources.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param authenticationSearch - The authenticationSearch object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchAuthentications(authenticationSearch, requestOptions) {\n if (!authenticationSearch) {\n throw new Error(\"Parameter `authenticationSearch` is required when calling `searchAuthentications`.\");\n }\n if (!authenticationSearch.authenticationIDs) {\n throw new Error(\n \"Parameter `authenticationSearch.authenticationIDs` is required when calling `searchAuthentications`.\"\n );\n }\n const requestPath = \"/1/authentications/search\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: authenticationSearch\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for destinations.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param destinationSearch - The destinationSearch object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchDestinations(destinationSearch, requestOptions) {\n if (!destinationSearch) {\n throw new Error(\"Parameter `destinationSearch` is required when calling `searchDestinations`.\");\n }\n if (!destinationSearch.destinationIDs) {\n throw new Error(\"Parameter `destinationSearch.destinationIDs` is required when calling `searchDestinations`.\");\n }\n const requestPath = \"/1/destinations/search\";\n const headers = {};\n const queryParameters = 
{};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: destinationSearch\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for sources.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param sourceSearch - The sourceSearch object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchSources(sourceSearch, requestOptions) {\n if (!sourceSearch) {\n throw new Error(\"Parameter `sourceSearch` is required when calling `searchSources`.\");\n }\n if (!sourceSearch.sourceIDs) {\n throw new Error(\"Parameter `sourceSearch.sourceIDs` is required when calling `searchSources`.\");\n }\n const requestPath = \"/1/sources/search\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: sourceSearch\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for tasks.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param taskSearch - The taskSearch object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchTasks(taskSearch, requestOptions) {\n if (!taskSearch) {\n throw new Error(\"Parameter `taskSearch` is required when calling `searchTasks`.\");\n }\n if (!taskSearch.taskIDs) {\n throw new Error(\"Parameter `taskSearch.taskIDs` is required when calling `searchTasks`.\");\n }\n const requestPath = \"/2/tasks/search\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: taskSearch\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for tasks using the v1 endpoint, please use `searchTasks` instead.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param taskSearch - The taskSearch object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchTasksV1(taskSearch, requestOptions) {\n if (!taskSearch) {\n throw new Error(\"Parameter `taskSearch` is required when calling `searchTasksV1`.\");\n }\n if (!taskSearch.taskIDs) {\n throw new Error(\"Parameter `taskSearch.taskIDs` is required when calling `searchTasksV1`.\");\n }\n const requestPath = \"/1/tasks/search\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: taskSearch\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for transformations.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param transformationSearch - The transformationSearch object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchTransformations(transformationSearch, requestOptions) {\n if (!transformationSearch) {\n throw new Error(\"Parameter `transformationSearch` is required when calling `searchTransformations`.\");\n }\n if (!transformationSearch.transformationIDs) {\n throw new Error(\n \"Parameter `transformationSearch.transformationIDs` is required when calling `searchTransformations`.\"\n 
);\n }\n const requestPath = \"/1/transformations/search\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: transformationSearch\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Triggers a stream-listing request for a source. Triggering stream-listing requests only works with sources with `type: docker` and `imageType: airbyte`.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param triggerDockerSourceDiscover - The triggerDockerSourceDiscover object.\n * @param triggerDockerSourceDiscover.sourceID - Unique identifier of a source.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n triggerDockerSourceDiscover({ sourceID }, requestOptions) {\n if (!sourceID) {\n throw new Error(\"Parameter `sourceID` is required when calling `triggerDockerSourceDiscover`.\");\n }\n const requestPath = \"/1/sources/{sourceID}/discover\".replace(\"{sourceID}\", encodeURIComponent(sourceID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers\n };\n requestOptions = {\n timeouts: {\n connect: 18e4,\n read: 18e4,\n write: 18e4,\n ...requestOptions?.timeouts\n }\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Try a transformation before creating it.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param transformationTry - The transformationTry object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n tryTransformation(transformationTry, requestOptions) {\n if (!transformationTry) {\n throw new Error(\"Parameter `transformationTry` is required when calling `tryTransformation`.\");\n }\n if (!transformationTry.code) {\n throw new Error(\"Parameter `transformationTry.code` is required when calling `tryTransformation`.\");\n }\n if (!transformationTry.sampleRecord) {\n throw new Error(\"Parameter `transformationTry.sampleRecord` is required when calling `tryTransformation`.\");\n }\n const requestPath = \"/1/transformations/try\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: transformationTry\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Try a transformation before updating it.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param tryTransformationBeforeUpdate - The tryTransformationBeforeUpdate object.\n * @param tryTransformationBeforeUpdate.transformationID - Unique identifier of a transformation.\n * @param tryTransformationBeforeUpdate.transformationTry - The transformationTry object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n tryTransformationBeforeUpdate({ transformationID, transformationTry }, requestOptions) {\n if (!transformationID) {\n throw new Error(\"Parameter `transformationID` is required when calling `tryTransformationBeforeUpdate`.\");\n }\n if (!transformationTry) {\n throw new Error(\"Parameter `transformationTry` is required when calling `tryTransformationBeforeUpdate`.\");\n }\n if (!transformationTry.code) {\n throw new 
Error(\"Parameter `transformationTry.code` is required when calling `tryTransformationBeforeUpdate`.\");\n }\n if (!transformationTry.sampleRecord) {\n throw new Error(\n \"Parameter `transformationTry.sampleRecord` is required when calling `tryTransformationBeforeUpdate`.\"\n );\n }\n const requestPath = \"/1/transformations/{transformationID}/try\".replace(\n \"{transformationID}\",\n encodeURIComponent(transformationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: transformationTry\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Updates an authentication resource.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param updateAuthentication - The updateAuthentication object.\n * @param updateAuthentication.authenticationID - Unique identifier of an authentication resource.\n * @param updateAuthentication.authenticationUpdate - The authenticationUpdate object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n updateAuthentication({ authenticationID, authenticationUpdate }, requestOptions) {\n if (!authenticationID) {\n throw new Error(\"Parameter `authenticationID` is required when calling `updateAuthentication`.\");\n }\n if (!authenticationUpdate) {\n throw new Error(\"Parameter `authenticationUpdate` is required when calling `updateAuthentication`.\");\n }\n const requestPath = \"/1/authentications/{authenticationID}\".replace(\n \"{authenticationID}\",\n encodeURIComponent(authenticationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PATCH\",\n path: requestPath,\n queryParameters,\n headers,\n data: authenticationUpdate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Updates the destination by its ID.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param updateDestination - The updateDestination object.\n * @param updateDestination.destinationID - Unique identifier of a destination.\n * @param updateDestination.destinationUpdate - The destinationUpdate object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n updateDestination({ destinationID, destinationUpdate }, requestOptions) {\n if (!destinationID) {\n throw new Error(\"Parameter `destinationID` is required when calling `updateDestination`.\");\n }\n if (!destinationUpdate) {\n throw new Error(\"Parameter `destinationUpdate` is required when calling `updateDestination`.\");\n }\n const requestPath = \"/1/destinations/{destinationID}\".replace(\n \"{destinationID}\",\n encodeURIComponent(destinationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PATCH\",\n path: requestPath,\n queryParameters,\n headers,\n data: destinationUpdate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Updates a source by its ID.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param updateSource - The updateSource object.\n * @param updateSource.sourceID - Unique identifier of a source.\n * @param updateSource.sourceUpdate - The sourceUpdate object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter 
requestOptions.\n */\n updateSource({ sourceID, sourceUpdate }, requestOptions) {\n if (!sourceID) {\n throw new Error(\"Parameter `sourceID` is required when calling `updateSource`.\");\n }\n if (!sourceUpdate) {\n throw new Error(\"Parameter `sourceUpdate` is required when calling `updateSource`.\");\n }\n const requestPath = \"/1/sources/{sourceID}\".replace(\"{sourceID}\", encodeURIComponent(sourceID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PATCH\",\n path: requestPath,\n queryParameters,\n headers,\n data: sourceUpdate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Updates a task by its ID.\n * @param updateTask - The updateTask object.\n * @param updateTask.taskID - Unique identifier of a task.\n * @param updateTask.taskUpdate - The taskUpdate object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n updateTask({ taskID, taskUpdate }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `updateTask`.\");\n }\n if (!taskUpdate) {\n throw new Error(\"Parameter `taskUpdate` is required when calling `updateTask`.\");\n }\n const requestPath = \"/2/tasks/{taskID}\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PATCH\",\n path: requestPath,\n queryParameters,\n headers,\n data: taskUpdate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Updates a task by its ID using the v1 endpoint, please use `updateTask` instead.\n * @param updateTaskV1 - The updateTaskV1 object.\n * @param updateTaskV1.taskID - Unique identifier of a task.\n * @param updateTaskV1.taskUpdate - The taskUpdate object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n updateTaskV1({ taskID, taskUpdate }, requestOptions) {\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `updateTaskV1`.\");\n }\n if (!taskUpdate) {\n throw new Error(\"Parameter `taskUpdate` is required when calling `updateTaskV1`.\");\n }\n const requestPath = \"/1/tasks/{taskID}\".replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PATCH\",\n path: requestPath,\n queryParameters,\n headers,\n data: taskUpdate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Updates a transformation by its ID.\n * @param updateTransformation - The updateTransformation object.\n * @param updateTransformation.transformationID - Unique identifier of a transformation.\n * @param updateTransformation.transformationCreate - The transformationCreate object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n updateTransformation({ transformationID, transformationCreate }, requestOptions) {\n if (!transformationID) {\n throw new Error(\"Parameter `transformationID` is required when calling `updateTransformation`.\");\n }\n if (!transformationCreate) {\n throw new Error(\"Parameter `transformationCreate` is required when calling `updateTransformation`.\");\n }\n if (!transformationCreate.code) {\n throw new Error(\"Parameter `transformationCreate.code` is required when calling `updateTransformation`.\");\n }\n if (!transformationCreate.name) {\n 
throw new Error(\"Parameter `transformationCreate.name` is required when calling `updateTransformation`.\");\n }\n const requestPath = \"/1/transformations/{transformationID}\".replace(\n \"{transformationID}\",\n encodeURIComponent(transformationID)\n );\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: transformationCreate\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Validates a source payload to ensure it can be created and that the data source can be reached by Algolia.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param sourceCreate -\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n validateSource(sourceCreate, requestOptions = void 0) {\n const requestPath = \"/1/sources/validate\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: sourceCreate ? sourceCreate : {}\n };\n requestOptions = {\n timeouts: {\n connect: 18e4,\n read: 18e4,\n write: 18e4,\n ...requestOptions?.timeouts\n }\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Validates an update of a source payload to ensure it can be created and that the data source can be reached by Algolia.\n *\n * Required API Key ACLs:\n * - addObject\n * - deleteIndex\n * - editSettings\n * @param validateSourceBeforeUpdate - The validateSourceBeforeUpdate object.\n * @param validateSourceBeforeUpdate.sourceID - Unique identifier of a source.\n * @param validateSourceBeforeUpdate.sourceUpdate - The sourceUpdate object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n validateSourceBeforeUpdate({ sourceID, sourceUpdate }, requestOptions) {\n if (!sourceID) {\n throw new Error(\"Parameter `sourceID` is required when calling `validateSourceBeforeUpdate`.\");\n }\n if (!sourceUpdate) {\n throw new Error(\"Parameter `sourceUpdate` is required when calling `validateSourceBeforeUpdate`.\");\n }\n const requestPath = \"/1/sources/{sourceID}/validate\".replace(\"{sourceID}\", encodeURIComponent(sourceID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: sourceUpdate\n };\n requestOptions = {\n timeouts: {\n connect: 18e4,\n read: 18e4,\n write: 18e4,\n ...requestOptions?.timeouts\n }\n };\n return transporter.request(request, requestOptions);\n }\n };\n}\n\n// builds/browser.ts\nfunction ingestionClient(appId, apiKey, region, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n if (!region || region && (typeof region !== \"string\" || !REGIONS.includes(region))) {\n throw new Error(`\\`region\\` is required and must be one of the following: ${REGIONS.join(\", \")}`);\n }\n return createIngestionClient({\n appId,\n apiKey,\n region,\n timeouts: {\n connect: 25e3,\n read: 25e3,\n write: 25e3\n },\n logger: createNullLogger(),\n requester: createXhrRequester(),\n algoliaAgents: [{ segment: \"Browser\" }],\n authMode: \"WithinQueryParameters\",\n responsesCache: createMemoryCache(),\n requestsCache: createMemoryCache({ 
serializable: false }),\n hostsCache: createFallbackableCache({\n caches: [createBrowserLocalStorageCache({ key: `${apiClientVersion}-${appId}` }), createMemoryCache()]\n }),\n ...options\n });\n}\nexport {\n apiClientVersion,\n ingestionClient,\n isOnDemandTrigger,\n isScheduleTrigger,\n isSubscriptionTrigger\n};\n//# sourceMappingURL=browser.js.map","function m(){function r(t){return new Promise(s=>{let e=new XMLHttpRequest;e.open(t.method,t.url,!0),Object.keys(t.headers).forEach(n=>e.setRequestHeader(n,t.headers[n]));let i=(n,a)=>setTimeout(()=>{e.abort(),s({status:0,content:a,isTimedOut:!0})},n),u=i(t.connectTimeout,\"Connection timeout\"),o;e.onreadystatechange=()=>{e.readyState>e.OPENED&&o===void 0&&(clearTimeout(u),o=i(t.responseTimeout,\"Socket timeout\"))},e.onerror=()=>{e.status===0&&(clearTimeout(u),clearTimeout(o),s({content:e.responseText||\"Network request failed\",status:e.status,isTimedOut:!1}))},e.onload=()=>{clearTimeout(u),clearTimeout(o),s({content:e.responseText,status:e.status,isTimedOut:!1})},e.send(t.data)})}return{send:r}}export{m as createXhrRequester};\n//# sourceMappingURL=requester.xhr.js.map","// src/cache/createBrowserLocalStorageCache.ts\nfunction createBrowserLocalStorageCache(options) {\n let storage;\n const namespaceKey = `algolia-client-js-${options.key}`;\n function getStorage() {\n if (storage === void 0) {\n storage = options.localStorage || window.localStorage;\n }\n return storage;\n }\n function getNamespace() {\n return JSON.parse(getStorage().getItem(namespaceKey) || \"{}\");\n }\n function setNamespace(namespace) {\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n }\n function removeOutdatedCacheItems() {\n const timeToLive = options.timeToLive ? options.timeToLive * 1e3 : null;\n const namespace = getNamespace();\n const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(\n Object.entries(namespace).filter(([, cacheItem]) => {\n return cacheItem.timestamp !== void 0;\n })\n );\n setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);\n if (!timeToLive) {\n return;\n }\n const filteredNamespaceWithoutExpiredItems = Object.fromEntries(\n Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {\n const currentTimestamp = (/* @__PURE__ */ new Date()).getTime();\n const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;\n return !isExpired;\n })\n );\n setNamespace(filteredNamespaceWithoutExpiredItems);\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return Promise.resolve().then(() => {\n removeOutdatedCacheItems();\n return getNamespace()[JSON.stringify(key)];\n }).then((value) => {\n return Promise.all([value ? 
value.value : defaultValue(), value !== void 0]);\n }).then(([value, exists]) => {\n return Promise.all([value, exists || events.miss(value)]);\n }).then(([value]) => value);\n },\n set(key, value) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n namespace[JSON.stringify(key)] = {\n timestamp: (/* @__PURE__ */ new Date()).getTime(),\n value\n };\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n return value;\n });\n },\n delete(key) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n delete namespace[JSON.stringify(key)];\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n });\n },\n clear() {\n return Promise.resolve().then(() => {\n getStorage().removeItem(namespaceKey);\n });\n }\n };\n}\n\n// src/cache/createNullCache.ts\nfunction createNullCache() {\n return {\n get(_key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const value = defaultValue();\n return value.then((result) => Promise.all([result, events.miss(result)])).then(([result]) => result);\n },\n set(_key, value) {\n return Promise.resolve(value);\n },\n delete(_key) {\n return Promise.resolve();\n },\n clear() {\n return Promise.resolve();\n }\n };\n}\n\n// src/cache/createFallbackableCache.ts\nfunction createFallbackableCache(options) {\n const caches = [...options.caches];\n const current = caches.shift();\n if (current === void 0) {\n return createNullCache();\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return current.get(key, defaultValue, events).catch(() => {\n return createFallbackableCache({ caches }).get(key, defaultValue, events);\n });\n },\n set(key, value) {\n return current.set(key, value).catch(() => {\n return createFallbackableCache({ caches }).set(key, value);\n });\n },\n delete(key) {\n return current.delete(key).catch(() => {\n return createFallbackableCache({ caches }).delete(key);\n });\n },\n clear() {\n return current.clear().catch(() => {\n return createFallbackableCache({ caches }).clear();\n });\n }\n };\n}\n\n// src/cache/createMemoryCache.ts\nfunction createMemoryCache(options = { serializable: true }) {\n let cache = {};\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const keyAsString = JSON.stringify(key);\n if (keyAsString in cache) {\n return Promise.resolve(options.serializable ? JSON.parse(cache[keyAsString]) : cache[keyAsString]);\n }\n const promise = defaultValue();\n return promise.then((value) => events.miss(value)).then(() => promise);\n },\n set(key, value) {\n cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;\n return Promise.resolve(value);\n },\n delete(key) {\n delete cache[JSON.stringify(key)];\n return Promise.resolve();\n },\n clear() {\n cache = {};\n return Promise.resolve();\n }\n };\n}\n\n// src/constants.ts\nvar DEFAULT_CONNECT_TIMEOUT_BROWSER = 1e3;\nvar DEFAULT_READ_TIMEOUT_BROWSER = 2e3;\nvar DEFAULT_WRITE_TIMEOUT_BROWSER = 3e4;\nvar DEFAULT_CONNECT_TIMEOUT_NODE = 2e3;\nvar DEFAULT_READ_TIMEOUT_NODE = 5e3;\nvar DEFAULT_WRITE_TIMEOUT_NODE = 3e4;\n\n// src/createAlgoliaAgent.ts\nfunction createAlgoliaAgent(version) {\n const algoliaAgent = {\n value: `Algolia for JavaScript (${version})`,\n add(options) {\n const addedAlgoliaAgent = `; ${options.segment}${options.version !== void 0 ? 
` (${options.version})` : \"\"}`;\n if (algoliaAgent.value.indexOf(addedAlgoliaAgent) === -1) {\n algoliaAgent.value = `${algoliaAgent.value}${addedAlgoliaAgent}`;\n }\n return algoliaAgent;\n }\n };\n return algoliaAgent;\n}\n\n// src/createAuth.ts\nfunction createAuth(appId, apiKey, authMode = \"WithinHeaders\") {\n const credentials = {\n \"x-algolia-api-key\": apiKey,\n \"x-algolia-application-id\": appId\n };\n return {\n headers() {\n return authMode === \"WithinHeaders\" ? credentials : {};\n },\n queryParameters() {\n return authMode === \"WithinQueryParameters\" ? credentials : {};\n }\n };\n}\n\n// src/createIterablePromise.ts\nfunction createIterablePromise({\n func,\n validate,\n aggregator,\n error,\n timeout = () => 0\n}) {\n const retry = (previousResponse) => {\n return new Promise((resolve, reject) => {\n func(previousResponse).then(async (response) => {\n if (aggregator) {\n await aggregator(response);\n }\n if (await validate(response)) {\n return resolve(response);\n }\n if (error && await error.validate(response)) {\n return reject(new Error(await error.message(response)));\n }\n return setTimeout(\n () => {\n retry(response).then(resolve).catch(reject);\n },\n await timeout()\n );\n }).catch((err) => {\n reject(err);\n });\n });\n };\n return retry();\n}\n\n// src/getAlgoliaAgent.ts\nfunction getAlgoliaAgent({ algoliaAgents, client, version }) {\n const defaultAlgoliaAgent = createAlgoliaAgent(version).add({\n segment: client,\n version\n });\n algoliaAgents.forEach((algoliaAgent) => defaultAlgoliaAgent.add(algoliaAgent));\n return defaultAlgoliaAgent;\n}\n\n// src/logger/createNullLogger.ts\nfunction createNullLogger() {\n return {\n debug(_message, _args) {\n return Promise.resolve();\n },\n info(_message, _args) {\n return Promise.resolve();\n },\n error(_message, _args) {\n return Promise.resolve();\n }\n };\n}\n\n// src/transporter/createStatefulHost.ts\nvar EXPIRATION_DELAY = 2 * 60 * 1e3;\nfunction createStatefulHost(host, status = \"up\") {\n const lastUpdate = Date.now();\n function isUp() {\n return status === \"up\" || Date.now() - lastUpdate > EXPIRATION_DELAY;\n }\n function isTimedOut() {\n return status === \"timed out\" && Date.now() - lastUpdate <= EXPIRATION_DELAY;\n }\n return { ...host, status, lastUpdate, isUp, isTimedOut };\n}\n\n// src/transporter/errors.ts\nvar AlgoliaError = class extends Error {\n name = \"AlgoliaError\";\n constructor(message, name) {\n super(message);\n if (name) {\n this.name = name;\n }\n }\n};\nvar ErrorWithStackTrace = class extends AlgoliaError {\n stackTrace;\n constructor(message, stackTrace, name) {\n super(message, name);\n this.stackTrace = stackTrace;\n }\n};\nvar RetryError = class extends ErrorWithStackTrace {\n constructor(stackTrace) {\n super(\n \"Unreachable hosts - your application id may be incorrect. 
If the error persists, please reach out to the Algolia Support team: https://alg.li/support.\",\n stackTrace,\n \"RetryError\"\n );\n }\n};\nvar ApiError = class extends ErrorWithStackTrace {\n status;\n constructor(message, status, stackTrace, name = \"ApiError\") {\n super(message, stackTrace, name);\n this.status = status;\n }\n};\nvar DeserializationError = class extends AlgoliaError {\n response;\n constructor(message, response) {\n super(message, \"DeserializationError\");\n this.response = response;\n }\n};\nvar DetailedApiError = class extends ApiError {\n error;\n constructor(message, status, error, stackTrace) {\n super(message, status, stackTrace, \"DetailedApiError\");\n this.error = error;\n }\n};\n\n// src/transporter/helpers.ts\nfunction shuffle(array) {\n const shuffledArray = array;\n for (let c = array.length - 1; c > 0; c--) {\n const b = Math.floor(Math.random() * (c + 1));\n const a = array[c];\n shuffledArray[c] = array[b];\n shuffledArray[b] = a;\n }\n return shuffledArray;\n}\nfunction serializeUrl(host, path, queryParameters) {\n const queryParametersAsString = serializeQueryParameters(queryParameters);\n let url = `${host.protocol}://${host.url}${host.port ? `:${host.port}` : \"\"}/${path.charAt(0) === \"/\" ? path.substring(1) : path}`;\n if (queryParametersAsString.length) {\n url += `?${queryParametersAsString}`;\n }\n return url;\n}\nfunction serializeQueryParameters(parameters) {\n return Object.keys(parameters).filter((key) => parameters[key] !== void 0).sort().map(\n (key) => `${key}=${encodeURIComponent(\n Object.prototype.toString.call(parameters[key]) === \"[object Array]\" ? parameters[key].join(\",\") : parameters[key]\n ).replace(/\\+/g, \"%20\")}`\n ).join(\"&\");\n}\nfunction serializeData(request, requestOptions) {\n if (request.method === \"GET\" || request.data === void 0 && requestOptions.data === void 0) {\n return void 0;\n }\n const data = Array.isArray(request.data) ? 
request.data : { ...request.data, ...requestOptions.data };\n return JSON.stringify(data);\n}\nfunction serializeHeaders(baseHeaders, requestHeaders, requestOptionsHeaders) {\n const headers = {\n Accept: \"application/json\",\n ...baseHeaders,\n ...requestHeaders,\n ...requestOptionsHeaders\n };\n const serializedHeaders = {};\n Object.keys(headers).forEach((header) => {\n const value = headers[header];\n serializedHeaders[header.toLowerCase()] = value;\n });\n return serializedHeaders;\n}\nfunction deserializeSuccess(response) {\n try {\n return JSON.parse(response.content);\n } catch (e) {\n throw new DeserializationError(e.message, response);\n }\n}\nfunction deserializeFailure({ content, status }, stackFrame) {\n try {\n const parsed = JSON.parse(content);\n if (\"error\" in parsed) {\n return new DetailedApiError(parsed.message, status, parsed.error, stackFrame);\n }\n return new ApiError(parsed.message, status, stackFrame);\n } catch {\n }\n return new ApiError(content, status, stackFrame);\n}\n\n// src/transporter/responses.ts\nfunction isNetworkError({ isTimedOut, status }) {\n return !isTimedOut && ~~status === 0;\n}\nfunction isRetryable({ isTimedOut, status }) {\n return isTimedOut || isNetworkError({ isTimedOut, status }) || ~~(status / 100) !== 2 && ~~(status / 100) !== 4;\n}\nfunction isSuccess({ status }) {\n return ~~(status / 100) === 2;\n}\n\n// src/transporter/stackTrace.ts\nfunction stackTraceWithoutCredentials(stackTrace) {\n return stackTrace.map((stackFrame) => stackFrameWithoutCredentials(stackFrame));\n}\nfunction stackFrameWithoutCredentials(stackFrame) {\n const modifiedHeaders = stackFrame.request.headers[\"x-algolia-api-key\"] ? { \"x-algolia-api-key\": \"*****\" } : {};\n return {\n ...stackFrame,\n request: {\n ...stackFrame.request,\n headers: {\n ...stackFrame.request.headers,\n ...modifiedHeaders\n }\n }\n };\n}\n\n// src/transporter/createTransporter.ts\nfunction createTransporter({\n hosts,\n hostsCache,\n baseHeaders,\n logger,\n baseQueryParameters,\n algoliaAgent,\n timeouts,\n requester,\n requestsCache,\n responsesCache\n}) {\n async function createRetryableOptions(compatibleHosts) {\n const statefulHosts = await Promise.all(\n compatibleHosts.map((compatibleHost) => {\n return hostsCache.get(compatibleHost, () => {\n return Promise.resolve(createStatefulHost(compatibleHost));\n });\n })\n );\n const hostsUp = statefulHosts.filter((host) => host.isUp());\n const hostsTimedOut = statefulHosts.filter((host) => host.isTimedOut());\n const hostsAvailable = [...hostsUp, ...hostsTimedOut];\n const compatibleHostsAvailable = hostsAvailable.length > 0 ? hostsAvailable : compatibleHosts;\n return {\n hosts: compatibleHostsAvailable,\n getTimeout(timeoutsCount, baseTimeout) {\n const timeoutMultiplier = hostsTimedOut.length === 0 && timeoutsCount === 0 ? 1 : hostsTimedOut.length + 3 + timeoutsCount;\n return timeoutMultiplier * baseTimeout;\n }\n };\n }\n async function retryableRequest(request, requestOptions, isRead = true) {\n const stackTrace = [];\n const data = serializeData(request, requestOptions);\n const headers = serializeHeaders(baseHeaders, request.headers, requestOptions.headers);\n const dataQueryParameters = request.method === \"GET\" ? 
{\n ...request.data,\n ...requestOptions.data\n } : {};\n const queryParameters = {\n ...baseQueryParameters,\n ...request.queryParameters,\n ...dataQueryParameters\n };\n if (algoliaAgent.value) {\n queryParameters[\"x-algolia-agent\"] = algoliaAgent.value;\n }\n if (requestOptions && requestOptions.queryParameters) {\n for (const key of Object.keys(requestOptions.queryParameters)) {\n if (!requestOptions.queryParameters[key] || Object.prototype.toString.call(requestOptions.queryParameters[key]) === \"[object Object]\") {\n queryParameters[key] = requestOptions.queryParameters[key];\n } else {\n queryParameters[key] = requestOptions.queryParameters[key].toString();\n }\n }\n }\n let timeoutsCount = 0;\n const retry = async (retryableHosts, getTimeout) => {\n const host = retryableHosts.pop();\n if (host === void 0) {\n throw new RetryError(stackTraceWithoutCredentials(stackTrace));\n }\n const timeout = { ...timeouts, ...requestOptions.timeouts };\n const payload = {\n data,\n headers,\n method: request.method,\n url: serializeUrl(host, request.path, queryParameters),\n connectTimeout: getTimeout(timeoutsCount, timeout.connect),\n responseTimeout: getTimeout(timeoutsCount, isRead ? timeout.read : timeout.write)\n };\n const pushToStackTrace = (response2) => {\n const stackFrame = {\n request: payload,\n response: response2,\n host,\n triesLeft: retryableHosts.length\n };\n stackTrace.push(stackFrame);\n return stackFrame;\n };\n const response = await requester.send(payload);\n if (isRetryable(response)) {\n const stackFrame = pushToStackTrace(response);\n if (response.isTimedOut) {\n timeoutsCount++;\n }\n logger.info(\"Retryable failure\", stackFrameWithoutCredentials(stackFrame));\n await hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? \"timed out\" : \"down\"));\n return retry(retryableHosts, getTimeout);\n }\n if (isSuccess(response)) {\n return deserializeSuccess(response);\n }\n pushToStackTrace(response);\n throw deserializeFailure(response, stackTrace);\n };\n const compatibleHosts = hosts.filter(\n (host) => host.accept === \"readWrite\" || (isRead ? 
host.accept === \"read\" : host.accept === \"write\")\n );\n const options = await createRetryableOptions(compatibleHosts);\n return retry([...options.hosts].reverse(), options.getTimeout);\n }\n function createRequest(request, requestOptions = {}) {\n const isRead = request.useReadTransporter || request.method === \"GET\";\n if (!isRead) {\n return retryableRequest(request, requestOptions, isRead);\n }\n const createRetryableRequest = () => {\n return retryableRequest(request, requestOptions);\n };\n const cacheable = requestOptions.cacheable || request.cacheable;\n if (cacheable !== true) {\n return createRetryableRequest();\n }\n const key = {\n request,\n requestOptions,\n transporter: {\n queryParameters: baseQueryParameters,\n headers: baseHeaders\n }\n };\n return responsesCache.get(\n key,\n () => {\n return requestsCache.get(\n key,\n () => (\n /**\n * Finally, if there is no request in progress with the same key,\n * this `createRetryableRequest()` will actually trigger the\n * retryable request.\n */\n requestsCache.set(key, createRetryableRequest()).then(\n (response) => Promise.all([requestsCache.delete(key), response]),\n (err) => Promise.all([requestsCache.delete(key), Promise.reject(err)])\n ).then(([_, response]) => response)\n )\n );\n },\n {\n /**\n * Of course, once we get this response back from the server, we\n * tell response cache to actually store the received response\n * to be used later.\n */\n miss: (response) => responsesCache.set(key, response)\n }\n );\n }\n return {\n hostsCache,\n requester,\n timeouts,\n logger,\n algoliaAgent,\n baseHeaders,\n baseQueryParameters,\n hosts,\n request: createRequest,\n requestsCache,\n responsesCache\n };\n}\n\n// src/types/logger.ts\nvar LogLevelEnum = {\n Debug: 1,\n Info: 2,\n Error: 3\n};\nexport {\n AlgoliaError,\n ApiError,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_CONNECT_TIMEOUT_NODE,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_NODE,\n DEFAULT_WRITE_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_NODE,\n DeserializationError,\n DetailedApiError,\n ErrorWithStackTrace,\n LogLevelEnum,\n RetryError,\n createAlgoliaAgent,\n createAuth,\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createIterablePromise,\n createMemoryCache,\n createNullCache,\n createNullLogger,\n createStatefulHost,\n createTransporter,\n deserializeFailure,\n deserializeSuccess,\n getAlgoliaAgent,\n isNetworkError,\n isRetryable,\n isSuccess,\n serializeData,\n serializeHeaders,\n serializeQueryParameters,\n serializeUrl,\n shuffle,\n stackFrameWithoutCredentials,\n stackTraceWithoutCredentials\n};\n//# sourceMappingURL=common.js.map","// builds/browser.ts\nimport { createXhrRequester } from \"@algolia/requester-browser-xhr\";\nimport {\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createMemoryCache,\n createNullLogger\n} from \"@algolia/client-common\";\n\n// src/monitoringClient.ts\nimport { createAuth, createTransporter, getAlgoliaAgent } from \"@algolia/client-common\";\nvar apiClientVersion = \"1.19.0\";\nfunction getDefaultHosts() {\n return [{ url: \"status.algolia.com\", accept: \"readWrite\", protocol: \"https\" }];\n}\nfunction createMonitoringClient({\n appId: appIdOption,\n apiKey: apiKeyOption,\n authMode,\n algoliaAgents,\n ...options\n}) {\n const auth = createAuth(appIdOption, apiKeyOption, authMode);\n const transporter = createTransporter({\n hosts: getDefaultHosts(),\n ...options,\n algoliaAgent: getAlgoliaAgent({\n algoliaAgents,\n client: \"Monitoring\",\n version: apiClientVersion\n 
}),\n baseHeaders: {\n \"content-type\": \"text/plain\",\n ...auth.headers(),\n ...options.baseHeaders\n },\n baseQueryParameters: {\n ...auth.queryParameters(),\n ...options.baseQueryParameters\n }\n });\n return {\n transporter,\n /**\n * The `appId` currently in use.\n */\n appId: appIdOption,\n /**\n * The `apiKey` currently in use.\n */\n apiKey: apiKeyOption,\n /**\n * Clears the cache of the transporter for the `requestsCache` and `responsesCache` properties.\n */\n clearCache() {\n return Promise.all([transporter.requestsCache.clear(), transporter.responsesCache.clear()]).then(() => void 0);\n },\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return transporter.algoliaAgent.value;\n },\n /**\n * Adds a `segment` to the `x-algolia-agent` sent with every requests.\n *\n * @param segment - The algolia agent (user-agent) segment to add.\n * @param version - The version of the agent.\n */\n addAlgoliaAgent(segment, version) {\n transporter.algoliaAgent.add({ segment, version });\n },\n /**\n * Helper method to switch the API key used to authenticate the requests.\n *\n * @param params - Method params.\n * @param params.apiKey - The new API Key to use.\n */\n setClientApiKey({ apiKey }) {\n if (!authMode || authMode === \"WithinHeaders\") {\n transporter.baseHeaders[\"x-algolia-api-key\"] = apiKey;\n } else {\n transporter.baseQueryParameters[\"x-algolia-api-key\"] = apiKey;\n }\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customDelete - The customDelete object.\n * @param customDelete.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customDelete.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customDelete({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customDelete`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customGet - The customGet object.\n * @param customGet.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customGet.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customGet({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customGet`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? 
parameters : {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPost - The customPost object.\n * @param customPost.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPost.parameters - Query parameters to apply to the current query.\n * @param customPost.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPost({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPost`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPut - The customPut object.\n * @param customPut.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPut.parameters - Query parameters to apply to the current query.\n * @param customPut.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPut({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPut`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? 
body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves known incidents for the selected clusters.\n * @param getClusterIncidents - The getClusterIncidents object.\n * @param getClusterIncidents.clusters - Subset of clusters, separated by commas.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getClusterIncidents({ clusters }, requestOptions) {\n if (!clusters) {\n throw new Error(\"Parameter `clusters` is required when calling `getClusterIncidents`.\");\n }\n const requestPath = \"/1/incidents/{clusters}\".replace(\"{clusters}\", encodeURIComponent(clusters));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the status of selected clusters.\n * @param getClusterStatus - The getClusterStatus object.\n * @param getClusterStatus.clusters - Subset of clusters, separated by commas.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getClusterStatus({ clusters }, requestOptions) {\n if (!clusters) {\n throw new Error(\"Parameter `clusters` is required when calling `getClusterStatus`.\");\n }\n const requestPath = \"/1/status/{clusters}\".replace(\"{clusters}\", encodeURIComponent(clusters));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves known incidents for all clusters.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getIncidents(requestOptions) {\n const requestPath = \"/1/incidents\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves average times for indexing operations for selected clusters.\n * @param getIndexingTime - The getIndexingTime object.\n * @param getIndexingTime.clusters - Subset of clusters, separated by commas.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getIndexingTime({ clusters }, requestOptions) {\n if (!clusters) {\n throw new Error(\"Parameter `clusters` is required when calling `getIndexingTime`.\");\n }\n const requestPath = \"/1/indexing/{clusters}\".replace(\"{clusters}\", encodeURIComponent(clusters));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the average latency for search requests for selected clusters.\n * @param getLatency - The getLatency object.\n * @param getLatency.clusters - Subset of clusters, separated by commas.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getLatency({ clusters }, requestOptions) {\n if (!clusters) {\n throw new Error(\"Parameter `clusters` is required when calling `getLatency`.\");\n }\n const requestPath = 
\"/1/latency/{clusters}\".replace(\"{clusters}\", encodeURIComponent(clusters));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves metrics related to your Algolia infrastructure, aggregated over a selected time window. Access to this API is available as part of the [Premium or Elevate plans](https://www.algolia.com/pricing). You must authenticate requests with the `x-algolia-application-id` and `x-algolia-api-key` headers (using the Monitoring API key).\n * @param getMetrics - The getMetrics object.\n * @param getMetrics.metric - Metric to report. For more information about the individual metrics, see the description of the API response. To include all metrics, use `*`.\n * @param getMetrics.period - Period over which to aggregate the metrics: - `minute`. Aggregate the last minute. 1 data point per 10 seconds. - `hour`. Aggregate the last hour. 1 data point per minute. - `day`. Aggregate the last day. 1 data point per 10 minutes. - `week`. Aggregate the last week. 1 data point per hour. - `month`. Aggregate the last month. 1 data point per day.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getMetrics({ metric, period }, requestOptions) {\n if (!metric) {\n throw new Error(\"Parameter `metric` is required when calling `getMetrics`.\");\n }\n if (!period) {\n throw new Error(\"Parameter `period` is required when calling `getMetrics`.\");\n }\n const requestPath = \"/1/infrastructure/{metric}/period/{period}\".replace(\"{metric}\", encodeURIComponent(metric)).replace(\"{period}\", encodeURIComponent(period));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Test whether clusters are reachable or not.\n * @param getReachability - The getReachability object.\n * @param getReachability.clusters - Subset of clusters, separated by commas.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getReachability({ clusters }, requestOptions) {\n if (!clusters) {\n throw new Error(\"Parameter `clusters` is required when calling `getReachability`.\");\n }\n const requestPath = \"/1/reachability/{clusters}/probes\".replace(\"{clusters}\", encodeURIComponent(clusters));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the servers that belong to clusters. The response depends on whether you authenticate your API request: - With authentication, the response lists the servers assigned to your Algolia application\\'s cluster. 
- Without authentication, the response lists the servers for all Algolia clusters.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getServers(requestOptions) {\n const requestPath = \"/1/inventory/servers\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves the status of all Algolia clusters and instances.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getStatus(requestOptions) {\n const requestPath = \"/1/status\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n }\n };\n}\n\n// builds/browser.ts\nfunction monitoringClient(appId, apiKey, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n return createMonitoringClient({\n appId,\n apiKey,\n timeouts: {\n connect: 1e3,\n read: 2e3,\n write: 3e4\n },\n logger: createNullLogger(),\n requester: createXhrRequester(),\n algoliaAgents: [{ segment: \"Browser\" }],\n authMode: \"WithinQueryParameters\",\n responsesCache: createMemoryCache(),\n requestsCache: createMemoryCache({ serializable: false }),\n hostsCache: createFallbackableCache({\n caches: [createBrowserLocalStorageCache({ key: `${apiClientVersion}-${appId}` }), createMemoryCache()]\n }),\n ...options\n });\n}\nexport {\n apiClientVersion,\n monitoringClient\n};\n//# sourceMappingURL=browser.js.map","function m(){function r(t){return new Promise(s=>{let e=new XMLHttpRequest;e.open(t.method,t.url,!0),Object.keys(t.headers).forEach(n=>e.setRequestHeader(n,t.headers[n]));let i=(n,a)=>setTimeout(()=>{e.abort(),s({status:0,content:a,isTimedOut:!0})},n),u=i(t.connectTimeout,\"Connection timeout\"),o;e.onreadystatechange=()=>{e.readyState>e.OPENED&&o===void 0&&(clearTimeout(u),o=i(t.responseTimeout,\"Socket timeout\"))},e.onerror=()=>{e.status===0&&(clearTimeout(u),clearTimeout(o),s({content:e.responseText||\"Network request failed\",status:e.status,isTimedOut:!1}))},e.onload=()=>{clearTimeout(u),clearTimeout(o),s({content:e.responseText,status:e.status,isTimedOut:!1})},e.send(t.data)})}return{send:r}}export{m as createXhrRequester};\n//# sourceMappingURL=requester.xhr.js.map","// src/cache/createBrowserLocalStorageCache.ts\nfunction createBrowserLocalStorageCache(options) {\n let storage;\n const namespaceKey = `algolia-client-js-${options.key}`;\n function getStorage() {\n if (storage === void 0) {\n storage = options.localStorage || window.localStorage;\n }\n return storage;\n }\n function getNamespace() {\n return JSON.parse(getStorage().getItem(namespaceKey) || \"{}\");\n }\n function setNamespace(namespace) {\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n }\n function removeOutdatedCacheItems() {\n const timeToLive = options.timeToLive ? 
options.timeToLive * 1e3 : null;\n const namespace = getNamespace();\n const filteredNamespaceWithoutOldFormattedCacheItems = Object.fromEntries(\n Object.entries(namespace).filter(([, cacheItem]) => {\n return cacheItem.timestamp !== void 0;\n })\n );\n setNamespace(filteredNamespaceWithoutOldFormattedCacheItems);\n if (!timeToLive) {\n return;\n }\n const filteredNamespaceWithoutExpiredItems = Object.fromEntries(\n Object.entries(filteredNamespaceWithoutOldFormattedCacheItems).filter(([, cacheItem]) => {\n const currentTimestamp = (/* @__PURE__ */ new Date()).getTime();\n const isExpired = cacheItem.timestamp + timeToLive < currentTimestamp;\n return !isExpired;\n })\n );\n setNamespace(filteredNamespaceWithoutExpiredItems);\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return Promise.resolve().then(() => {\n removeOutdatedCacheItems();\n return getNamespace()[JSON.stringify(key)];\n }).then((value) => {\n return Promise.all([value ? value.value : defaultValue(), value !== void 0]);\n }).then(([value, exists]) => {\n return Promise.all([value, exists || events.miss(value)]);\n }).then(([value]) => value);\n },\n set(key, value) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n namespace[JSON.stringify(key)] = {\n timestamp: (/* @__PURE__ */ new Date()).getTime(),\n value\n };\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n return value;\n });\n },\n delete(key) {\n return Promise.resolve().then(() => {\n const namespace = getNamespace();\n delete namespace[JSON.stringify(key)];\n getStorage().setItem(namespaceKey, JSON.stringify(namespace));\n });\n },\n clear() {\n return Promise.resolve().then(() => {\n getStorage().removeItem(namespaceKey);\n });\n }\n };\n}\n\n// src/cache/createNullCache.ts\nfunction createNullCache() {\n return {\n get(_key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const value = defaultValue();\n return value.then((result) => Promise.all([result, events.miss(result)])).then(([result]) => result);\n },\n set(_key, value) {\n return Promise.resolve(value);\n },\n delete(_key) {\n return Promise.resolve();\n },\n clear() {\n return Promise.resolve();\n }\n };\n}\n\n// src/cache/createFallbackableCache.ts\nfunction createFallbackableCache(options) {\n const caches = [...options.caches];\n const current = caches.shift();\n if (current === void 0) {\n return createNullCache();\n }\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n return current.get(key, defaultValue, events).catch(() => {\n return createFallbackableCache({ caches }).get(key, defaultValue, events);\n });\n },\n set(key, value) {\n return current.set(key, value).catch(() => {\n return createFallbackableCache({ caches }).set(key, value);\n });\n },\n delete(key) {\n return current.delete(key).catch(() => {\n return createFallbackableCache({ caches }).delete(key);\n });\n },\n clear() {\n return current.clear().catch(() => {\n return createFallbackableCache({ caches }).clear();\n });\n }\n };\n}\n\n// src/cache/createMemoryCache.ts\nfunction createMemoryCache(options = { serializable: true }) {\n let cache = {};\n return {\n get(key, defaultValue, events = {\n miss: () => Promise.resolve()\n }) {\n const keyAsString = JSON.stringify(key);\n if (keyAsString in cache) {\n return Promise.resolve(options.serializable ? 
JSON.parse(cache[keyAsString]) : cache[keyAsString]);\n }\n const promise = defaultValue();\n return promise.then((value) => events.miss(value)).then(() => promise);\n },\n set(key, value) {\n cache[JSON.stringify(key)] = options.serializable ? JSON.stringify(value) : value;\n return Promise.resolve(value);\n },\n delete(key) {\n delete cache[JSON.stringify(key)];\n return Promise.resolve();\n },\n clear() {\n cache = {};\n return Promise.resolve();\n }\n };\n}\n\n// src/constants.ts\nvar DEFAULT_CONNECT_TIMEOUT_BROWSER = 1e3;\nvar DEFAULT_READ_TIMEOUT_BROWSER = 2e3;\nvar DEFAULT_WRITE_TIMEOUT_BROWSER = 3e4;\nvar DEFAULT_CONNECT_TIMEOUT_NODE = 2e3;\nvar DEFAULT_READ_TIMEOUT_NODE = 5e3;\nvar DEFAULT_WRITE_TIMEOUT_NODE = 3e4;\n\n// src/createAlgoliaAgent.ts\nfunction createAlgoliaAgent(version) {\n const algoliaAgent = {\n value: `Algolia for JavaScript (${version})`,\n add(options) {\n const addedAlgoliaAgent = `; ${options.segment}${options.version !== void 0 ? ` (${options.version})` : \"\"}`;\n if (algoliaAgent.value.indexOf(addedAlgoliaAgent) === -1) {\n algoliaAgent.value = `${algoliaAgent.value}${addedAlgoliaAgent}`;\n }\n return algoliaAgent;\n }\n };\n return algoliaAgent;\n}\n\n// src/createAuth.ts\nfunction createAuth(appId, apiKey, authMode = \"WithinHeaders\") {\n const credentials = {\n \"x-algolia-api-key\": apiKey,\n \"x-algolia-application-id\": appId\n };\n return {\n headers() {\n return authMode === \"WithinHeaders\" ? credentials : {};\n },\n queryParameters() {\n return authMode === \"WithinQueryParameters\" ? credentials : {};\n }\n };\n}\n\n// src/createIterablePromise.ts\nfunction createIterablePromise({\n func,\n validate,\n aggregator,\n error,\n timeout = () => 0\n}) {\n const retry = (previousResponse) => {\n return new Promise((resolve, reject) => {\n func(previousResponse).then(async (response) => {\n if (aggregator) {\n await aggregator(response);\n }\n if (await validate(response)) {\n return resolve(response);\n }\n if (error && await error.validate(response)) {\n return reject(new Error(await error.message(response)));\n }\n return setTimeout(\n () => {\n retry(response).then(resolve).catch(reject);\n },\n await timeout()\n );\n }).catch((err) => {\n reject(err);\n });\n });\n };\n return retry();\n}\n\n// src/getAlgoliaAgent.ts\nfunction getAlgoliaAgent({ algoliaAgents, client, version }) {\n const defaultAlgoliaAgent = createAlgoliaAgent(version).add({\n segment: client,\n version\n });\n algoliaAgents.forEach((algoliaAgent) => defaultAlgoliaAgent.add(algoliaAgent));\n return defaultAlgoliaAgent;\n}\n\n// src/logger/createNullLogger.ts\nfunction createNullLogger() {\n return {\n debug(_message, _args) {\n return Promise.resolve();\n },\n info(_message, _args) {\n return Promise.resolve();\n },\n error(_message, _args) {\n return Promise.resolve();\n }\n };\n}\n\n// src/transporter/createStatefulHost.ts\nvar EXPIRATION_DELAY = 2 * 60 * 1e3;\nfunction createStatefulHost(host, status = \"up\") {\n const lastUpdate = Date.now();\n function isUp() {\n return status === \"up\" || Date.now() - lastUpdate > EXPIRATION_DELAY;\n }\n function isTimedOut() {\n return status === \"timed out\" && Date.now() - lastUpdate <= EXPIRATION_DELAY;\n }\n return { ...host, status, lastUpdate, isUp, isTimedOut };\n}\n\n// src/transporter/errors.ts\nvar AlgoliaError = class extends Error {\n name = \"AlgoliaError\";\n constructor(message, name) {\n super(message);\n if (name) {\n this.name = name;\n }\n }\n};\nvar ErrorWithStackTrace = class extends AlgoliaError {\n stackTrace;\n 
constructor(message, stackTrace, name) {\n super(message, name);\n this.stackTrace = stackTrace;\n }\n};\nvar RetryError = class extends ErrorWithStackTrace {\n constructor(stackTrace) {\n super(\n \"Unreachable hosts - your application id may be incorrect. If the error persists, please reach out to the Algolia Support team: https://alg.li/support.\",\n stackTrace,\n \"RetryError\"\n );\n }\n};\nvar ApiError = class extends ErrorWithStackTrace {\n status;\n constructor(message, status, stackTrace, name = \"ApiError\") {\n super(message, stackTrace, name);\n this.status = status;\n }\n};\nvar DeserializationError = class extends AlgoliaError {\n response;\n constructor(message, response) {\n super(message, \"DeserializationError\");\n this.response = response;\n }\n};\nvar DetailedApiError = class extends ApiError {\n error;\n constructor(message, status, error, stackTrace) {\n super(message, status, stackTrace, \"DetailedApiError\");\n this.error = error;\n }\n};\n\n// src/transporter/helpers.ts\nfunction shuffle(array) {\n const shuffledArray = array;\n for (let c = array.length - 1; c > 0; c--) {\n const b = Math.floor(Math.random() * (c + 1));\n const a = array[c];\n shuffledArray[c] = array[b];\n shuffledArray[b] = a;\n }\n return shuffledArray;\n}\nfunction serializeUrl(host, path, queryParameters) {\n const queryParametersAsString = serializeQueryParameters(queryParameters);\n let url = `${host.protocol}://${host.url}${host.port ? `:${host.port}` : \"\"}/${path.charAt(0) === \"/\" ? path.substring(1) : path}`;\n if (queryParametersAsString.length) {\n url += `?${queryParametersAsString}`;\n }\n return url;\n}\nfunction serializeQueryParameters(parameters) {\n return Object.keys(parameters).filter((key) => parameters[key] !== void 0).sort().map(\n (key) => `${key}=${encodeURIComponent(\n Object.prototype.toString.call(parameters[key]) === \"[object Array]\" ? parameters[key].join(\",\") : parameters[key]\n ).replace(/\\+/g, \"%20\")}`\n ).join(\"&\");\n}\nfunction serializeData(request, requestOptions) {\n if (request.method === \"GET\" || request.data === void 0 && requestOptions.data === void 0) {\n return void 0;\n }\n const data = Array.isArray(request.data) ? 
request.data : { ...request.data, ...requestOptions.data };\n return JSON.stringify(data);\n}\nfunction serializeHeaders(baseHeaders, requestHeaders, requestOptionsHeaders) {\n const headers = {\n Accept: \"application/json\",\n ...baseHeaders,\n ...requestHeaders,\n ...requestOptionsHeaders\n };\n const serializedHeaders = {};\n Object.keys(headers).forEach((header) => {\n const value = headers[header];\n serializedHeaders[header.toLowerCase()] = value;\n });\n return serializedHeaders;\n}\nfunction deserializeSuccess(response) {\n try {\n return JSON.parse(response.content);\n } catch (e) {\n throw new DeserializationError(e.message, response);\n }\n}\nfunction deserializeFailure({ content, status }, stackFrame) {\n try {\n const parsed = JSON.parse(content);\n if (\"error\" in parsed) {\n return new DetailedApiError(parsed.message, status, parsed.error, stackFrame);\n }\n return new ApiError(parsed.message, status, stackFrame);\n } catch {\n }\n return new ApiError(content, status, stackFrame);\n}\n\n// src/transporter/responses.ts\nfunction isNetworkError({ isTimedOut, status }) {\n return !isTimedOut && ~~status === 0;\n}\nfunction isRetryable({ isTimedOut, status }) {\n return isTimedOut || isNetworkError({ isTimedOut, status }) || ~~(status / 100) !== 2 && ~~(status / 100) !== 4;\n}\nfunction isSuccess({ status }) {\n return ~~(status / 100) === 2;\n}\n\n// src/transporter/stackTrace.ts\nfunction stackTraceWithoutCredentials(stackTrace) {\n return stackTrace.map((stackFrame) => stackFrameWithoutCredentials(stackFrame));\n}\nfunction stackFrameWithoutCredentials(stackFrame) {\n const modifiedHeaders = stackFrame.request.headers[\"x-algolia-api-key\"] ? { \"x-algolia-api-key\": \"*****\" } : {};\n return {\n ...stackFrame,\n request: {\n ...stackFrame.request,\n headers: {\n ...stackFrame.request.headers,\n ...modifiedHeaders\n }\n }\n };\n}\n\n// src/transporter/createTransporter.ts\nfunction createTransporter({\n hosts,\n hostsCache,\n baseHeaders,\n logger,\n baseQueryParameters,\n algoliaAgent,\n timeouts,\n requester,\n requestsCache,\n responsesCache\n}) {\n async function createRetryableOptions(compatibleHosts) {\n const statefulHosts = await Promise.all(\n compatibleHosts.map((compatibleHost) => {\n return hostsCache.get(compatibleHost, () => {\n return Promise.resolve(createStatefulHost(compatibleHost));\n });\n })\n );\n const hostsUp = statefulHosts.filter((host) => host.isUp());\n const hostsTimedOut = statefulHosts.filter((host) => host.isTimedOut());\n const hostsAvailable = [...hostsUp, ...hostsTimedOut];\n const compatibleHostsAvailable = hostsAvailable.length > 0 ? hostsAvailable : compatibleHosts;\n return {\n hosts: compatibleHostsAvailable,\n getTimeout(timeoutsCount, baseTimeout) {\n const timeoutMultiplier = hostsTimedOut.length === 0 && timeoutsCount === 0 ? 1 : hostsTimedOut.length + 3 + timeoutsCount;\n return timeoutMultiplier * baseTimeout;\n }\n };\n }\n async function retryableRequest(request, requestOptions, isRead = true) {\n const stackTrace = [];\n const data = serializeData(request, requestOptions);\n const headers = serializeHeaders(baseHeaders, request.headers, requestOptions.headers);\n const dataQueryParameters = request.method === \"GET\" ? 
{\n ...request.data,\n ...requestOptions.data\n } : {};\n const queryParameters = {\n ...baseQueryParameters,\n ...request.queryParameters,\n ...dataQueryParameters\n };\n if (algoliaAgent.value) {\n queryParameters[\"x-algolia-agent\"] = algoliaAgent.value;\n }\n if (requestOptions && requestOptions.queryParameters) {\n for (const key of Object.keys(requestOptions.queryParameters)) {\n if (!requestOptions.queryParameters[key] || Object.prototype.toString.call(requestOptions.queryParameters[key]) === \"[object Object]\") {\n queryParameters[key] = requestOptions.queryParameters[key];\n } else {\n queryParameters[key] = requestOptions.queryParameters[key].toString();\n }\n }\n }\n let timeoutsCount = 0;\n const retry = async (retryableHosts, getTimeout) => {\n const host = retryableHosts.pop();\n if (host === void 0) {\n throw new RetryError(stackTraceWithoutCredentials(stackTrace));\n }\n const timeout = { ...timeouts, ...requestOptions.timeouts };\n const payload = {\n data,\n headers,\n method: request.method,\n url: serializeUrl(host, request.path, queryParameters),\n connectTimeout: getTimeout(timeoutsCount, timeout.connect),\n responseTimeout: getTimeout(timeoutsCount, isRead ? timeout.read : timeout.write)\n };\n const pushToStackTrace = (response2) => {\n const stackFrame = {\n request: payload,\n response: response2,\n host,\n triesLeft: retryableHosts.length\n };\n stackTrace.push(stackFrame);\n return stackFrame;\n };\n const response = await requester.send(payload);\n if (isRetryable(response)) {\n const stackFrame = pushToStackTrace(response);\n if (response.isTimedOut) {\n timeoutsCount++;\n }\n logger.info(\"Retryable failure\", stackFrameWithoutCredentials(stackFrame));\n await hostsCache.set(host, createStatefulHost(host, response.isTimedOut ? \"timed out\" : \"down\"));\n return retry(retryableHosts, getTimeout);\n }\n if (isSuccess(response)) {\n return deserializeSuccess(response);\n }\n pushToStackTrace(response);\n throw deserializeFailure(response, stackTrace);\n };\n const compatibleHosts = hosts.filter(\n (host) => host.accept === \"readWrite\" || (isRead ? 
host.accept === \"read\" : host.accept === \"write\")\n );\n const options = await createRetryableOptions(compatibleHosts);\n return retry([...options.hosts].reverse(), options.getTimeout);\n }\n function createRequest(request, requestOptions = {}) {\n const isRead = request.useReadTransporter || request.method === \"GET\";\n if (!isRead) {\n return retryableRequest(request, requestOptions, isRead);\n }\n const createRetryableRequest = () => {\n return retryableRequest(request, requestOptions);\n };\n const cacheable = requestOptions.cacheable || request.cacheable;\n if (cacheable !== true) {\n return createRetryableRequest();\n }\n const key = {\n request,\n requestOptions,\n transporter: {\n queryParameters: baseQueryParameters,\n headers: baseHeaders\n }\n };\n return responsesCache.get(\n key,\n () => {\n return requestsCache.get(\n key,\n () => (\n /**\n * Finally, if there is no request in progress with the same key,\n * this `createRetryableRequest()` will actually trigger the\n * retryable request.\n */\n requestsCache.set(key, createRetryableRequest()).then(\n (response) => Promise.all([requestsCache.delete(key), response]),\n (err) => Promise.all([requestsCache.delete(key), Promise.reject(err)])\n ).then(([_, response]) => response)\n )\n );\n },\n {\n /**\n * Of course, once we get this response back from the server, we\n * tell response cache to actually store the received response\n * to be used later.\n */\n miss: (response) => responsesCache.set(key, response)\n }\n );\n }\n return {\n hostsCache,\n requester,\n timeouts,\n logger,\n algoliaAgent,\n baseHeaders,\n baseQueryParameters,\n hosts,\n request: createRequest,\n requestsCache,\n responsesCache\n };\n}\n\n// src/types/logger.ts\nvar LogLevelEnum = {\n Debug: 1,\n Info: 2,\n Error: 3\n};\nexport {\n AlgoliaError,\n ApiError,\n DEFAULT_CONNECT_TIMEOUT_BROWSER,\n DEFAULT_CONNECT_TIMEOUT_NODE,\n DEFAULT_READ_TIMEOUT_BROWSER,\n DEFAULT_READ_TIMEOUT_NODE,\n DEFAULT_WRITE_TIMEOUT_BROWSER,\n DEFAULT_WRITE_TIMEOUT_NODE,\n DeserializationError,\n DetailedApiError,\n ErrorWithStackTrace,\n LogLevelEnum,\n RetryError,\n createAlgoliaAgent,\n createAuth,\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createIterablePromise,\n createMemoryCache,\n createNullCache,\n createNullLogger,\n createStatefulHost,\n createTransporter,\n deserializeFailure,\n deserializeSuccess,\n getAlgoliaAgent,\n isNetworkError,\n isRetryable,\n isSuccess,\n serializeData,\n serializeHeaders,\n serializeQueryParameters,\n serializeUrl,\n shuffle,\n stackFrameWithoutCredentials,\n stackTraceWithoutCredentials\n};\n//# sourceMappingURL=common.js.map","// builds/browser.ts\nimport { createXhrRequester } from \"@algolia/requester-browser-xhr\";\nimport {\n createBrowserLocalStorageCache,\n createFallbackableCache,\n createMemoryCache,\n createNullLogger\n} from \"@algolia/client-common\";\n\n// src/recommendClient.ts\nimport { createAuth, createTransporter, getAlgoliaAgent, shuffle } from \"@algolia/client-common\";\nvar apiClientVersion = \"5.19.0\";\nfunction getDefaultHosts(appId) {\n return [\n {\n url: `${appId}-dsn.algolia.net`,\n accept: \"read\",\n protocol: \"https\"\n },\n {\n url: `${appId}.algolia.net`,\n accept: \"write\",\n protocol: \"https\"\n }\n ].concat(\n shuffle([\n {\n url: `${appId}-1.algolianet.com`,\n accept: \"readWrite\",\n protocol: \"https\"\n },\n {\n url: `${appId}-2.algolianet.com`,\n accept: \"readWrite\",\n protocol: \"https\"\n },\n {\n url: `${appId}-3.algolianet.com`,\n accept: \"readWrite\",\n protocol: 
\"https\"\n }\n ])\n );\n}\nfunction createRecommendClient({\n appId: appIdOption,\n apiKey: apiKeyOption,\n authMode,\n algoliaAgents,\n ...options\n}) {\n const auth = createAuth(appIdOption, apiKeyOption, authMode);\n const transporter = createTransporter({\n hosts: getDefaultHosts(appIdOption),\n ...options,\n algoliaAgent: getAlgoliaAgent({\n algoliaAgents,\n client: \"Recommend\",\n version: apiClientVersion\n }),\n baseHeaders: {\n \"content-type\": \"text/plain\",\n ...auth.headers(),\n ...options.baseHeaders\n },\n baseQueryParameters: {\n ...auth.queryParameters(),\n ...options.baseQueryParameters\n }\n });\n return {\n transporter,\n /**\n * The `appId` currently in use.\n */\n appId: appIdOption,\n /**\n * The `apiKey` currently in use.\n */\n apiKey: apiKeyOption,\n /**\n * Clears the cache of the transporter for the `requestsCache` and `responsesCache` properties.\n */\n clearCache() {\n return Promise.all([transporter.requestsCache.clear(), transporter.responsesCache.clear()]).then(() => void 0);\n },\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return transporter.algoliaAgent.value;\n },\n /**\n * Adds a `segment` to the `x-algolia-agent` sent with every requests.\n *\n * @param segment - The algolia agent (user-agent) segment to add.\n * @param version - The version of the agent.\n */\n addAlgoliaAgent(segment, version) {\n transporter.algoliaAgent.add({ segment, version });\n },\n /**\n * Helper method to switch the API key used to authenticate the requests.\n *\n * @param params - Method params.\n * @param params.apiKey - The new API Key to use.\n */\n setClientApiKey({ apiKey }) {\n if (!authMode || authMode === \"WithinHeaders\") {\n transporter.baseHeaders[\"x-algolia-api-key\"] = apiKey;\n } else {\n transporter.baseQueryParameters[\"x-algolia-api-key\"] = apiKey;\n }\n },\n /**\n * Create or update a batch of Recommend Rules Each Recommend Rule is created or updated, depending on whether a Recommend Rule with the same `objectID` already exists. You may also specify `true` for `clearExistingRules`, in which case the batch will atomically replace all the existing Recommend Rules. Recommend Rules are similar to Search Rules, except that the conditions and consequences apply to a [source item](/doc/guides/algolia-recommend/overview/#recommend-models) instead of a query. The main differences are the following: - Conditions `pattern` and `anchoring` are unavailable. - Condition `filters` triggers if the source item matches the specified filters. - Condition `filters` accepts numeric filters. - Consequence `params` only covers filtering parameters. 
- Consequence `automaticFacetFilters` doesn\\'t require a facet value placeholder (it tries to match the data source item\\'s attributes instead).\n *\n * Required API Key ACLs:\n * - editSettings\n * @param batchRecommendRules - The batchRecommendRules object.\n * @param batchRecommendRules.indexName - Name of the index on which to perform the operation.\n * @param batchRecommendRules.model - [Recommend model](https://www.algolia.com/doc/guides/algolia-recommend/overview/#recommend-models).\n * @param batchRecommendRules.recommendRule - The recommendRule object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n batchRecommendRules({ indexName, model, recommendRule }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `batchRecommendRules`.\");\n }\n if (!model) {\n throw new Error(\"Parameter `model` is required when calling `batchRecommendRules`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/{model}/recommend/rules/batch\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{model}\", encodeURIComponent(model));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: recommendRule ? recommendRule : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customDelete - The customDelete object.\n * @param customDelete.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customDelete.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customDelete({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customDelete`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customGet - The customGet object.\n * @param customGet.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customGet.parameters - Query parameters to apply to the current query.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customGet({ path, parameters }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customGet`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? 
parameters : {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPost - The customPost object.\n * @param customPost.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPost.parameters - Query parameters to apply to the current query.\n * @param customPost.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPost({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPost`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * This method allow you to send requests to the Algolia REST API.\n * @param customPut - The customPut object.\n * @param customPut.path - Path of the endpoint, anything after \\\"/1\\\" must be specified.\n * @param customPut.parameters - Query parameters to apply to the current query.\n * @param customPut.body - Parameters to send with the custom request.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n customPut({ path, parameters, body }, requestOptions) {\n if (!path) {\n throw new Error(\"Parameter `path` is required when calling `customPut`.\");\n }\n const requestPath = \"/{path}\".replace(\"{path}\", path);\n const headers = {};\n const queryParameters = parameters ? parameters : {};\n const request = {\n method: \"PUT\",\n path: requestPath,\n queryParameters,\n headers,\n data: body ? 
body : {}\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Deletes a Recommend rule from a recommendation scenario.\n *\n * Required API Key ACLs:\n * - editSettings\n * @param deleteRecommendRule - The deleteRecommendRule object.\n * @param deleteRecommendRule.indexName - Name of the index on which to perform the operation.\n * @param deleteRecommendRule.model - [Recommend model](https://www.algolia.com/doc/guides/algolia-recommend/overview/#recommend-models).\n * @param deleteRecommendRule.objectID - Unique record identifier.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n deleteRecommendRule({ indexName, model, objectID }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `deleteRecommendRule`.\");\n }\n if (!model) {\n throw new Error(\"Parameter `model` is required when calling `deleteRecommendRule`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `deleteRecommendRule`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/{model}/recommend/rules/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{model}\", encodeURIComponent(model)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"DELETE\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves a Recommend rule that you previously created in the Algolia dashboard.\n *\n * Required API Key ACLs:\n * - settings\n * @param getRecommendRule - The getRecommendRule object.\n * @param getRecommendRule.indexName - Name of the index on which to perform the operation.\n * @param getRecommendRule.model - [Recommend model](https://www.algolia.com/doc/guides/algolia-recommend/overview/#recommend-models).\n * @param getRecommendRule.objectID - Unique record identifier.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getRecommendRule({ indexName, model, objectID }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `getRecommendRule`.\");\n }\n if (!model) {\n throw new Error(\"Parameter `model` is required when calling `getRecommendRule`.\");\n }\n if (!objectID) {\n throw new Error(\"Parameter `objectID` is required when calling `getRecommendRule`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/{model}/recommend/rules/{objectID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{model}\", encodeURIComponent(model)).replace(\"{objectID}\", encodeURIComponent(objectID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Checks the status of a given task. Deleting a Recommend rule is asynchronous. When you delete a rule, a task is created on a queue and completed depending on the load on the server. 
The API response includes a task ID that you can use to check the status.\n *\n * Required API Key ACLs:\n * - editSettings\n * @param getRecommendStatus - The getRecommendStatus object.\n * @param getRecommendStatus.indexName - Name of the index on which to perform the operation.\n * @param getRecommendStatus.model - [Recommend model](https://www.algolia.com/doc/guides/algolia-recommend/overview/#recommend-models).\n * @param getRecommendStatus.taskID - Unique task identifier.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getRecommendStatus({ indexName, model, taskID }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `getRecommendStatus`.\");\n }\n if (!model) {\n throw new Error(\"Parameter `model` is required when calling `getRecommendStatus`.\");\n }\n if (!taskID) {\n throw new Error(\"Parameter `taskID` is required when calling `getRecommendStatus`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/{model}/task/{taskID}\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{model}\", encodeURIComponent(model)).replace(\"{taskID}\", encodeURIComponent(taskID));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"GET\",\n path: requestPath,\n queryParameters,\n headers\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Retrieves recommendations from selected AI models.\n *\n * Required API Key ACLs:\n * - search\n * @param getRecommendationsParams - The getRecommendationsParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n getRecommendations(getRecommendationsParams, requestOptions) {\n if (getRecommendationsParams && Array.isArray(getRecommendationsParams)) {\n const newSignatureRequest = {\n requests: getRecommendationsParams\n };\n getRecommendationsParams = newSignatureRequest;\n }\n if (!getRecommendationsParams) {\n throw new Error(\"Parameter `getRecommendationsParams` is required when calling `getRecommendations`.\");\n }\n if (!getRecommendationsParams.requests) {\n throw new Error(\"Parameter `getRecommendationsParams.requests` is required when calling `getRecommendations`.\");\n }\n const requestPath = \"/1/indexes/*/recommendations\";\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: getRecommendationsParams,\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n },\n /**\n * Searches for Recommend rules. 
Use an empty query to list all rules for this recommendation scenario.\n *\n * Required API Key ACLs:\n * - settings\n * @param searchRecommendRules - The searchRecommendRules object.\n * @param searchRecommendRules.indexName - Name of the index on which to perform the operation.\n * @param searchRecommendRules.model - [Recommend model](https://www.algolia.com/doc/guides/algolia-recommend/overview/#recommend-models).\n * @param searchRecommendRules.searchRecommendRulesParams - The searchRecommendRulesParams object.\n * @param requestOptions - The requestOptions to send along with the query, they will be merged with the transporter requestOptions.\n */\n searchRecommendRules({ indexName, model, searchRecommendRulesParams }, requestOptions) {\n if (!indexName) {\n throw new Error(\"Parameter `indexName` is required when calling `searchRecommendRules`.\");\n }\n if (!model) {\n throw new Error(\"Parameter `model` is required when calling `searchRecommendRules`.\");\n }\n const requestPath = \"/1/indexes/{indexName}/{model}/recommend/rules/search\".replace(\"{indexName}\", encodeURIComponent(indexName)).replace(\"{model}\", encodeURIComponent(model));\n const headers = {};\n const queryParameters = {};\n const request = {\n method: \"POST\",\n path: requestPath,\n queryParameters,\n headers,\n data: searchRecommendRulesParams ? searchRecommendRulesParams : {},\n useReadTransporter: true,\n cacheable: true\n };\n return transporter.request(request, requestOptions);\n }\n };\n}\n\n// builds/browser.ts\nfunction recommendClient(appId, apiKey, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n return createRecommendClient({\n appId,\n apiKey,\n timeouts: {\n connect: 1e3,\n read: 2e3,\n write: 3e4\n },\n logger: createNullLogger(),\n requester: createXhrRequester(),\n algoliaAgents: [{ segment: \"Browser\" }],\n authMode: \"WithinQueryParameters\",\n responsesCache: createMemoryCache(),\n requestsCache: createMemoryCache({ serializable: false }),\n hostsCache: createFallbackableCache({\n caches: [createBrowserLocalStorageCache({ key: `${apiClientVersion}-${appId}` }), createMemoryCache()]\n }),\n ...options\n });\n}\nexport {\n apiClientVersion,\n recommendClient\n};\n//# sourceMappingURL=browser.js.map","var __defProp = Object.defineProperty;\nvar __getOwnPropDesc = Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __export = (target, all) => {\n for (var name in all)\n __defProp(target, name, { get: all[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, \"default\"), secondTarget && __copyProps(secondTarget, mod, \"default\"));\n\n// builds/browser.ts\nvar browser_exports = {};\n__export(browser_exports, {\n algoliasearch: () => algoliasearch,\n apiClientVersion: () => apiClientVersion\n});\nimport { abtestingClient } from \"@algolia/client-abtesting\";\nimport { analyticsClient } from \"@algolia/client-analytics\";\nimport { insightsClient } 
from \"@algolia/client-insights\";\nimport { personalizationClient } from \"@algolia/client-personalization\";\nimport { querySuggestionsClient } from \"@algolia/client-query-suggestions\";\nimport { searchClient } from \"@algolia/client-search\";\nimport { ingestionClient } from \"@algolia/ingestion\";\nimport { monitoringClient } from \"@algolia/monitoring\";\nimport { recommendClient } from \"@algolia/recommend\";\n\n// builds/models.ts\nvar models_exports = {};\n__export(models_exports, {\n apiClientVersion: () => apiClientVersion\n});\n__reExport(models_exports, client_abtesting_star);\n__reExport(models_exports, client_analytics_star);\n__reExport(models_exports, client_insights_star);\n__reExport(models_exports, client_personalization_star);\n__reExport(models_exports, client_query_suggestions_star);\n__reExport(models_exports, client_search_star);\n__reExport(models_exports, ingestion_star);\n__reExport(models_exports, monitoring_star);\n__reExport(models_exports, recommend_star);\nimport { apiClientVersion } from \"@algolia/client-search\";\nimport * as client_abtesting_star from \"@algolia/client-abtesting\";\nimport * as client_analytics_star from \"@algolia/client-analytics\";\nimport * as client_insights_star from \"@algolia/client-insights\";\nimport * as client_personalization_star from \"@algolia/client-personalization\";\nimport * as client_query_suggestions_star from \"@algolia/client-query-suggestions\";\nimport * as client_search_star from \"@algolia/client-search\";\nimport * as ingestion_star from \"@algolia/ingestion\";\nimport * as monitoring_star from \"@algolia/monitoring\";\nimport * as recommend_star from \"@algolia/recommend\";\n\n// builds/browser.ts\n__reExport(browser_exports, models_exports);\nfunction algoliasearch(appId, apiKey, options) {\n if (!appId || typeof appId !== \"string\") {\n throw new Error(\"`appId` is missing.\");\n }\n if (!apiKey || typeof apiKey !== \"string\") {\n throw new Error(\"`apiKey` is missing.\");\n }\n const client = searchClient(appId, apiKey, options);\n return {\n ...client,\n /**\n * Get the value of the `algoliaAgent`, used by our libraries internally and telemetry system.\n */\n get _ua() {\n return client.transporter.algoliaAgent.value;\n },\n initAbtesting: (initOptions) => {\n return abtestingClient(\n initOptions.appId || appId,\n initOptions.apiKey || apiKey,\n initOptions.region,\n initOptions.options\n );\n },\n initAnalytics: (initOptions) => {\n return analyticsClient(\n initOptions.appId || appId,\n initOptions.apiKey || apiKey,\n initOptions.region,\n initOptions.options\n );\n },\n initIngestion: (initOptions) => {\n return ingestionClient(\n initOptions.appId || appId,\n initOptions.apiKey || apiKey,\n initOptions.region,\n initOptions.options\n );\n },\n initInsights: (initOptions) => {\n return insightsClient(\n initOptions.appId || appId,\n initOptions.apiKey || apiKey,\n initOptions.region,\n initOptions.options\n );\n },\n initMonitoring: (initOptions = {}) => {\n return monitoringClient(initOptions.appId || appId, initOptions.apiKey || apiKey, initOptions.options);\n },\n initPersonalization: (initOptions) => {\n return personalizationClient(\n initOptions.appId || appId,\n initOptions.apiKey || apiKey,\n initOptions.region,\n initOptions.options\n );\n },\n initQuerySuggestions: (initOptions) => {\n return querySuggestionsClient(\n initOptions.appId || appId,\n initOptions.apiKey || apiKey,\n initOptions.region,\n initOptions.options\n );\n },\n initRecommend: (initOptions = {}) => {\n return 
recommendClient(initOptions.appId || appId, initOptions.apiKey || apiKey, initOptions.options);\n }\n };\n}\nexport {\n algoliasearch,\n apiClientVersion\n};\n//# sourceMappingURL=browser.js.map","/* @license\nPapa Parse\nv5.4.1\nhttps://github.com/mholt/PapaParse\nLicense: MIT\n*/\n!function(e,t){\"function\"==typeof define&&define.amd?define([],t):\"object\"==typeof module&&\"undefined\"!=typeof exports?module.exports=t():e.Papa=t()}(this,function s(){\"use strict\";var f=\"undefined\"!=typeof self?self:\"undefined\"!=typeof window?window:void 0!==f?f:{};var n=!f.document&&!!f.postMessage,o=f.IS_PAPA_WORKER||!1,a={},u=0,b={parse:function(e,t){var r=(t=t||{}).dynamicTyping||!1;J(r)&&(t.dynamicTypingFunction=r,r={});if(t.dynamicTyping=r,t.transform=!!J(t.transform)&&t.transform,t.worker&&b.WORKERS_SUPPORTED){var i=function(){if(!b.WORKERS_SUPPORTED)return!1;var e=(r=f.URL||f.webkitURL||null,i=s.toString(),b.BLOB_URL||(b.BLOB_URL=r.createObjectURL(new Blob([\"var global = (function() { if (typeof self !== 'undefined') { return self; } if (typeof window !== 'undefined') { return window; } if (typeof global !== 'undefined') { return global; } return {}; })(); global.IS_PAPA_WORKER=true; \",\"(\",i,\")();\"],{type:\"text/javascript\"})))),t=new f.Worker(e);var r,i;return t.onmessage=_,t.id=u++,a[t.id]=t}();return i.userStep=t.step,i.userChunk=t.chunk,i.userComplete=t.complete,i.userError=t.error,t.step=J(t.step),t.chunk=J(t.chunk),t.complete=J(t.complete),t.error=J(t.error),delete t.worker,void i.postMessage({input:e,config:t,workerId:i.id})}var n=null;b.NODE_STREAM_INPUT,\"string\"==typeof e?(e=function(e){if(65279===e.charCodeAt(0))return e.slice(1);return e}(e),n=t.download?new l(t):new p(t)):!0===e.readable&&J(e.read)&&J(e.on)?n=new g(t):(f.File&&e instanceof File||e instanceof Object)&&(n=new c(t));return n.stream(e)},unparse:function(e,t){var n=!1,_=!0,m=\",\",y=\"\\r\\n\",s='\"',a=s+s,r=!1,i=null,o=!1;!function(){if(\"object\"!=typeof t)return;\"string\"!=typeof t.delimiter||b.BAD_DELIMITERS.filter(function(e){return-1!==t.delimiter.indexOf(e)}).length||(m=t.delimiter);(\"boolean\"==typeof t.quotes||\"function\"==typeof t.quotes||Array.isArray(t.quotes))&&(n=t.quotes);\"boolean\"!=typeof t.skipEmptyLines&&\"string\"!=typeof t.skipEmptyLines||(r=t.skipEmptyLines);\"string\"==typeof t.newline&&(y=t.newline);\"string\"==typeof t.quoteChar&&(s=t.quoteChar);\"boolean\"==typeof t.header&&(_=t.header);if(Array.isArray(t.columns)){if(0===t.columns.length)throw new Error(\"Option columns is empty\");i=t.columns}void 0!==t.escapeChar&&(a=t.escapeChar+s);(\"boolean\"==typeof t.escapeFormulae||t.escapeFormulae instanceof RegExp)&&(o=t.escapeFormulae instanceof RegExp?t.escapeFormulae:/^[=+\\-@\\t\\r].*$/)}();var u=new RegExp(Q(s),\"g\");\"string\"==typeof e&&(e=JSON.parse(e));if(Array.isArray(e)){if(!e.length||Array.isArray(e[0]))return h(null,e,r);if(\"object\"==typeof e[0])return h(i||Object.keys(e[0]),e,r)}else if(\"object\"==typeof e)return\"string\"==typeof e.data&&(e.data=JSON.parse(e.data)),Array.isArray(e.data)&&(e.fields||(e.fields=e.meta&&e.meta.fields||i),e.fields||(e.fields=Array.isArray(e.data[0])?e.fields:\"object\"==typeof e.data[0]?Object.keys(e.data[0]):[]),Array.isArray(e.data[0])||\"object\"==typeof e.data[0]||(e.data=[e.data])),h(e.fields||[],e.data||[],r);throw new Error(\"Unable to serialize unrecognized input\");function h(e,t,r){var i=\"\";\"string\"==typeof e&&(e=JSON.parse(e)),\"string\"==typeof t&&(t=JSON.parse(t));var 
n=Array.isArray(e)&&0=this._config.preview;if(o)f.postMessage({results:n,workerId:b.WORKER_ID,finished:a});else if(J(this._config.chunk)&&!t){if(this._config.chunk(n,this._handle),this._handle.paused()||this._handle.aborted())return void(this._halted=!0);n=void 0,this._completeResults=void 0}return this._config.step||this._config.chunk||(this._completeResults.data=this._completeResults.data.concat(n.data),this._completeResults.errors=this._completeResults.errors.concat(n.errors),this._completeResults.meta=n.meta),this._completed||!a||!J(this._config.complete)||n&&n.meta.aborted||(this._config.complete(this._completeResults,this._input),this._completed=!0),a||n&&n.meta.paused||this._nextChunk(),n}this._halted=!0},this._sendError=function(e){J(this._config.error)?this._config.error(e):o&&this._config.error&&f.postMessage({workerId:b.WORKER_ID,error:e,finished:!1})}}function l(e){var i;(e=e||{}).chunkSize||(e.chunkSize=b.RemoteChunkSize),h.call(this,e),this._nextChunk=n?function(){this._readChunk(),this._chunkLoaded()}:function(){this._readChunk()},this.stream=function(e){this._input=e,this._nextChunk()},this._readChunk=function(){if(this._finished)this._chunkLoaded();else{if(i=new XMLHttpRequest,this._config.withCredentials&&(i.withCredentials=this._config.withCredentials),n||(i.onload=v(this._chunkLoaded,this),i.onerror=v(this._chunkError,this)),i.open(this._config.downloadRequestBody?\"POST\":\"GET\",this._input,!n),this._config.downloadRequestHeaders){var e=this._config.downloadRequestHeaders;for(var t in e)i.setRequestHeader(t,e[t])}if(this._config.chunkSize){var r=this._start+this._config.chunkSize-1;i.setRequestHeader(\"Range\",\"bytes=\"+this._start+\"-\"+r)}try{i.send(this._config.downloadRequestBody)}catch(e){this._chunkError(e.message)}n&&0===i.status&&this._chunkError()}},this._chunkLoaded=function(){4===i.readyState&&(i.status<200||400<=i.status?this._chunkError():(this._start+=this._config.chunkSize?this._config.chunkSize:i.responseText.length,this._finished=!this._config.chunkSize||this._start>=function(e){var t=e.getResponseHeader(\"Content-Range\");if(null===t)return-1;return parseInt(t.substring(t.lastIndexOf(\"/\")+1))}(i),this.parseChunk(i.responseText)))},this._chunkError=function(e){var t=i.statusText||e;this._sendError(new Error(t))}}function c(e){var i,n;(e=e||{}).chunkSize||(e.chunkSize=b.LocalChunkSize),h.call(this,e);var s=\"undefined\"!=typeof FileReader;this.stream=function(e){this._input=e,n=e.slice||e.webkitSlice||e.mozSlice,s?((i=new FileReader).onload=v(this._chunkLoaded,this),i.onerror=v(this._chunkError,this)):i=new FileReaderSync,this._nextChunk()},this._nextChunk=function(){this._finished||this._config.preview&&!(this._rowCount=this._input.size,this.parseChunk(e.target.result)},this._chunkError=function(){this._sendError(i.error)}}function p(e){var r;h.call(this,e=e||{}),this.stream=function(e){return r=e,this._nextChunk()},this._nextChunk=function(){if(!this._finished){var e,t=this._config.chunkSize;return t?(e=r.substring(0,t),r=r.substring(t)):(e=r,r=\"\"),this._finished=!r,this.parseChunk(e)}}}function g(e){h.call(this,e=e||{});var 
t=[],r=!0,i=!1;this.pause=function(){h.prototype.pause.apply(this,arguments),this._input.pause()},this.resume=function(){h.prototype.resume.apply(this,arguments),this._input.resume()},this.stream=function(e){this._input=e,this._input.on(\"data\",this._streamData),this._input.on(\"end\",this._streamEnd),this._input.on(\"error\",this._streamError)},this._checkIsFinished=function(){i&&1===t.length&&(this._finished=!0)},this._nextChunk=function(){this._checkIsFinished(),t.length?this.parseChunk(t.shift()):r=!0},this._streamData=v(function(e){try{t.push(\"string\"==typeof e?e:e.toString(this._config.encoding)),r&&(r=!1,this._checkIsFinished(),this.parseChunk(t.shift()))}catch(e){this._streamError(e)}},this),this._streamError=v(function(e){this._streamCleanUp(),this._sendError(e)},this),this._streamEnd=v(function(){this._streamCleanUp(),i=!0,this._streamData(\"\")},this),this._streamCleanUp=v(function(){this._input.removeListener(\"data\",this._streamData),this._input.removeListener(\"end\",this._streamEnd),this._input.removeListener(\"error\",this._streamError)},this)}function r(m){var a,o,u,i=Math.pow(2,53),n=-i,s=/^\\s*-?(\\d+\\.?|\\.\\d+|\\d+\\.\\d+)([eE][-+]?\\d+)?\\s*$/,h=/^((\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z))|(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z))|(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z)))$/,t=this,r=0,f=0,d=!1,e=!1,l=[],c={data:[],errors:[],meta:{}};if(J(m.step)){var p=m.step;m.step=function(e){if(c=e,_())g();else{if(g(),0===c.data.length)return;r+=e.data.length,m.preview&&r>m.preview?o.abort():(c.data=c.data[0],p(c,t))}}}function y(e){return\"greedy\"===m.skipEmptyLines?\"\"===e.join(\"\").trim():1===e.length&&0===e[0].length}function g(){return c&&u&&(k(\"Delimiter\",\"UndetectableDelimiter\",\"Unable to auto-detect delimiting character; defaulted to '\"+b.DefaultDelimiter+\"'\"),u=!1),m.skipEmptyLines&&(c.data=c.data.filter(function(e){return!y(e)})),_()&&function(){if(!c)return;function e(e,t){J(m.transformHeader)&&(e=m.transformHeader(e,t)),l.push(e)}if(Array.isArray(c.data[0])){for(var t=0;_()&&t=l.length?\"__parsed_extra\":l[r]),m.transform&&(s=m.transform(s,n)),s=v(n,s),\"__parsed_extra\"===n?(i[n]=i[n]||[],i[n].push(s)):i[n]=s}return m.header&&(r>l.length?k(\"FieldMismatch\",\"TooManyFields\",\"Too many fields: expected \"+l.length+\" fields but parsed \"+r,f+t):r=i.length/2?\"\\r\\n\":\"\\r\"}(e,i)),u=!1,m.delimiter)J(m.delimiter)&&(m.delimiter=m.delimiter(e),c.meta.delimiter=m.delimiter);else{var n=function(e,t,r,i,n){var s,a,o,u;n=n||[\",\",\"\\t\",\"|\",\";\",b.RECORD_SEP,b.UNIT_SEP];for(var h=0;h=N)return L(!0)}else for(S=W,W++;;){if(-1===(S=i.indexOf(z,S+1)))return r||h.push({type:\"Quotes\",code:\"MissingQuotes\",message:\"Quoted field unterminated\",row:u.length,index:W}),T();if(S===n-1)return T(i.substring(W,S).replace(C,z));if(z!==K||i[S+1]!==K){if(z===K||0===S||i[S-1]!==K){-1!==w&&w=N)return L(!0);break}h.push({type:\"Quotes\",code:\"InvalidQuotes\",message:\"Trailing quote on quoted field is malformed\",row:u.length,index:W}),S++}}else S++}return T();function I(e){u.push(e),d=W}function A(e){var t=0;if(-1!==e){var r=i.substring(S+1,e);r&&\"\"===r.trim()&&(t=r.length)}return t}function T(e){return r||(void 0===e&&(e=i.substring(W)),f.push(e),W=n,I(f),o&&F()),L()}function D(e){W=e,I(f),f=[],R=i.indexOf(P,W)}function L(e){return{data:u,errors:h,meta:{delimiter:M,linebreak:P,aborted:H,truncated:!!e,cursor:d+(t||0)}}}function 
F(){q(L()),u=[],h=[]}},this.abort=function(){H=!0},this.getCharIndex=function(){return W}}function _(e){var t=e.data,r=a[t.workerId],i=!1;if(t.error)r.userError(t.error,t.file);else if(t.results&&t.results.data){var n={abort:function(){i=!0,m(t.workerId,{data:[],errors:[],meta:{aborted:!0}})},pause:y,resume:y};if(J(r.userStep)){for(var s=0;s