(our hidden element)\n // react-dom in dev mode will warn about this. There doesn't seem to be a way to render arbitrary\n // user Head without hitting this issue (our hidden element could be just \"new Document()\", but\n // this can only have 1 child, and we don't control what is being rendered so that's not an option)\n // instead we continue to render to
, and just silence warnings for and elements\n // https://github.com/facebook/react/blob/e2424f33b3ad727321fc12e75c5e94838e84c2b5/packages/react-dom-bindings/src/client/validateDOMNesting.js#L498-L520\n const originalConsoleError = console.error.bind(console)\n console.error = (...args) => {\n if (\n Array.isArray(args) &&\n args.length >= 2 &&\n args[0]?.includes?.(`validateDOMNesting(...): %s cannot appear as`) &&\n (args[1] === `` || args[1] === ``)\n ) {\n return undefined\n }\n return originalConsoleError(...args)\n }\n\n /* We set up observer to be able to regenerate after react-refresh\n updates our hidden element.\n */\n const observer = new MutationObserver(onHeadRendered)\n observer.observe(hiddenRoot, {\n attributes: true,\n childList: true,\n characterData: true,\n subtree: true,\n })\n}\n\nexport function headHandlerForBrowser({\n pageComponent,\n staticQueryResults,\n pageComponentProps,\n}) {\n useEffect(() => {\n if (pageComponent?.Head) {\n headExportValidator(pageComponent.Head)\n\n const { render } = reactDOMUtils()\n\n const HeadElement = (\n
\n )\n\n const WrapHeadElement = apiRunner(\n `wrapRootElement`,\n { element: HeadElement },\n HeadElement,\n ({ result }) => {\n return { element: result }\n }\n ).pop()\n\n render(\n // just a hack to call the callback after react has done first render\n // Note: In dev, we call onHeadRendered twice( in FireCallbackInEffect and after mutualution observer dectects initail render into hiddenRoot) this is for hot reloading\n // In Prod we only call onHeadRendered in FireCallbackInEffect to render to head\n
\n \n {WrapHeadElement}\n \n ,\n hiddenRoot\n )\n }\n\n return () => {\n removePrevHeadElements()\n removeHtmlAndBodyAttributes(keysOfHtmlAndBodyAttributes)\n }\n })\n}\n","import React, { Suspense, createElement } from \"react\"\nimport PropTypes from \"prop-types\"\nimport { apiRunner } from \"./api-runner-browser\"\nimport { grabMatchParams } from \"./find-path\"\nimport { headHandlerForBrowser } from \"./head/head-export-handler-for-browser\"\n\n// Renders page\nfunction PageRenderer(props) {\n const pageComponentProps = {\n ...props,\n params: {\n ...grabMatchParams(props.location.pathname),\n ...props.pageResources.json.pageContext.__params,\n },\n }\n\n const preferDefault = m => (m && m.default) || m\n\n let pageElement\n if (props.pageResources.partialHydration) {\n pageElement = props.pageResources.partialHydration\n } else {\n pageElement = createElement(preferDefault(props.pageResources.component), {\n ...pageComponentProps,\n key: props.path || props.pageResources.page.path,\n })\n }\n\n const pageComponent = props.pageResources.head\n\n headHandlerForBrowser({\n pageComponent,\n staticQueryResults: props.pageResources.staticQueryResults,\n pageComponentProps,\n })\n\n const wrappedPage = apiRunner(\n `wrapPageElement`,\n {\n element: pageElement,\n props: pageComponentProps,\n },\n pageElement,\n ({ result }) => {\n return { element: result, props: pageComponentProps }\n }\n ).pop()\n\n return wrappedPage\n}\n\nPageRenderer.propTypes = {\n location: PropTypes.object.isRequired,\n pageResources: PropTypes.object.isRequired,\n data: PropTypes.object,\n pageContext: PropTypes.object.isRequired,\n}\n\nexport default PageRenderer\n","// This is extracted to separate module because it's shared\n// between browser and SSR code\nexport const RouteAnnouncerProps = {\n id: `gatsby-announcer`,\n style: {\n position: `absolute`,\n top: 0,\n width: 1,\n height: 1,\n padding: 0,\n overflow: `hidden`,\n clip: `rect(0, 0, 0, 0)`,\n whiteSpace: `nowrap`,\n border: 0,\n },\n \"aria-live\": `assertive`,\n \"aria-atomic\": `true`,\n}\n","import React from \"react\"\nimport PropTypes from \"prop-types\"\nimport loader, { PageResourceStatus } from \"./loader\"\nimport { maybeGetBrowserRedirect } from \"./redirect-utils.js\"\nimport { apiRunner } from \"./api-runner-browser\"\nimport emitter from \"./emitter\"\nimport { RouteAnnouncerProps } from \"./route-announcer-props\"\nimport {\n navigate as reachNavigate,\n globalHistory,\n} from \"@gatsbyjs/reach-router\"\nimport { parsePath } from \"gatsby-link\"\n\nfunction maybeRedirect(pathname) {\n const redirect = maybeGetBrowserRedirect(pathname)\n const { hash, search } = window.location\n\n if (redirect != null) {\n window.___replace(redirect.toPath + search + hash)\n return true\n } else {\n return false\n }\n}\n\n// Catch unhandled chunk loading errors and force a restart of the app.\nlet nextRoute = ``\n\nwindow.addEventListener(`unhandledrejection`, event => {\n if (/loading chunk \\d* failed./i.test(event.reason)) {\n if (nextRoute) {\n window.location.pathname = nextRoute\n }\n }\n})\n\nconst onPreRouteUpdate = (location, prevLocation) => {\n if (!maybeRedirect(location.pathname)) {\n nextRoute = location.pathname\n apiRunner(`onPreRouteUpdate`, { location, prevLocation })\n }\n}\n\nconst onRouteUpdate = (location, prevLocation) => {\n if (!maybeRedirect(location.pathname)) {\n apiRunner(`onRouteUpdate`, { location, prevLocation })\n if (\n process.env.GATSBY_QUERY_ON_DEMAND &&\n process.env.GATSBY_QUERY_ON_DEMAND_LOADING_INDICATOR === `true`\n ) {\n emitter.emit(`onRouteUpdate`, { location, prevLocation })\n }\n }\n}\n\nconst navigate = (to, options = {}) => {\n // Support forward/backward navigation with numbers\n // navigate(-2) (jumps back 2 history steps)\n // navigate(2) (jumps forward 2 history steps)\n if (typeof to === `number`) {\n globalHistory.navigate(to)\n return\n }\n\n const { pathname, search, hash } = parsePath(to)\n const redirect = maybeGetBrowserRedirect(pathname)\n\n // If we're redirecting, just replace the passed in pathname\n // to the one we want to redirect to.\n if (redirect) {\n to = redirect.toPath + search + hash\n }\n\n // If we had a service worker update, no matter the path, reload window and\n // reset the pathname whitelist\n if (window.___swUpdated) {\n window.location = pathname + search + hash\n return\n }\n\n // Start a timer to wait for a second before transitioning and showing a\n // loader in case resources aren't around yet.\n const timeoutId = setTimeout(() => {\n emitter.emit(`onDelayedLoadPageResources`, { pathname })\n apiRunner(`onRouteUpdateDelayed`, {\n location: window.location,\n })\n }, 1000)\n\n loader.loadPage(pathname + search).then(pageResources => {\n // If no page resources, then refresh the page\n // Do this, rather than simply `window.location.reload()`, so that\n // pressing the back/forward buttons work - otherwise when pressing\n // back, the browser will just change the URL and expect JS to handle\n // the change, which won't always work since it might not be a Gatsby\n // page.\n if (!pageResources || pageResources.status === PageResourceStatus.Error) {\n window.history.replaceState({}, ``, location.href)\n window.location = pathname\n clearTimeout(timeoutId)\n return\n }\n\n // If the loaded page has a different compilation hash to the\n // window, then a rebuild has occurred on the server. Reload.\n if (process.env.NODE_ENV === `production` && pageResources) {\n if (\n pageResources.page.webpackCompilationHash !==\n window.___webpackCompilationHash\n ) {\n // Purge plugin-offline cache\n if (\n `serviceWorker` in navigator &&\n navigator.serviceWorker.controller !== null &&\n navigator.serviceWorker.controller.state === `activated`\n ) {\n navigator.serviceWorker.controller.postMessage({\n gatsbyApi: `clearPathResources`,\n })\n }\n\n window.location = pathname + search + hash\n }\n }\n reachNavigate(to, options)\n clearTimeout(timeoutId)\n })\n}\n\nfunction shouldUpdateScroll(prevRouterProps, { location }) {\n const { pathname, hash } = location\n const results = apiRunner(`shouldUpdateScroll`, {\n prevRouterProps,\n // `pathname` for backwards compatibility\n pathname,\n routerProps: { location },\n getSavedScrollPosition: args => [\n 0,\n // FIXME this is actually a big code smell, we should fix this\n // eslint-disable-next-line @babel/no-invalid-this\n this._stateStorage.read(args, args.key),\n ],\n })\n if (results.length > 0) {\n // Use the latest registered shouldUpdateScroll result, this allows users to override plugin's configuration\n // @see https://github.com/gatsbyjs/gatsby/issues/12038\n return results[results.length - 1]\n }\n\n if (prevRouterProps) {\n const {\n location: { pathname: oldPathname },\n } = prevRouterProps\n if (oldPathname === pathname) {\n // Scroll to element if it exists, if it doesn't, or no hash is provided,\n // scroll to top.\n return hash ? decodeURI(hash.slice(1)) : [0, 0]\n }\n }\n return true\n}\n\nfunction init() {\n // The \"scroll-behavior\" package expects the \"action\" to be on the location\n // object so let's copy it over.\n globalHistory.listen(args => {\n args.location.action = args.action\n })\n\n window.___push = to => navigate(to, { replace: false })\n window.___replace = to => navigate(to, { replace: true })\n window.___navigate = (to, options) => navigate(to, options)\n}\n\nclass RouteAnnouncer extends React.Component {\n constructor(props) {\n super(props)\n this.announcementRef = React.createRef()\n }\n\n componentDidUpdate(prevProps, nextProps) {\n requestAnimationFrame(() => {\n let pageName = `new page at ${this.props.location.pathname}`\n if (document.title) {\n pageName = document.title\n }\n const pageHeadings = document.querySelectorAll(`#gatsby-focus-wrapper h1`)\n if (pageHeadings && pageHeadings.length) {\n pageName = pageHeadings[0].textContent\n }\n const newAnnouncement = `Navigated to ${pageName}`\n if (this.announcementRef.current) {\n const oldAnnouncement = this.announcementRef.current.innerText\n if (oldAnnouncement !== newAnnouncement) {\n this.announcementRef.current.innerText = newAnnouncement\n }\n }\n })\n }\n\n render() {\n return
\n }\n}\n\nconst compareLocationProps = (prevLocation, nextLocation) => {\n if (prevLocation.href !== nextLocation.href) {\n return true\n }\n\n if (prevLocation?.state?.key !== nextLocation?.state?.key) {\n return true\n }\n\n return false\n}\n\n// Fire on(Pre)RouteUpdate APIs\nclass RouteUpdates extends React.Component {\n constructor(props) {\n super(props)\n onPreRouteUpdate(props.location, null)\n }\n\n componentDidMount() {\n onRouteUpdate(this.props.location, null)\n }\n\n shouldComponentUpdate(nextProps) {\n if (compareLocationProps(this.props.location, nextProps.location)) {\n onPreRouteUpdate(nextProps.location, this.props.location)\n return true\n }\n return false\n }\n\n componentDidUpdate(prevProps) {\n if (compareLocationProps(prevProps.location, this.props.location)) {\n onRouteUpdate(this.props.location, prevProps.location)\n }\n }\n\n render() {\n return (\n
\n {this.props.children}\n \n \n )\n }\n}\n\nRouteUpdates.propTypes = {\n location: PropTypes.object.isRequired,\n}\n\nexport { init, shouldUpdateScroll, RouteUpdates, maybeGetBrowserRedirect }\n","// Pulled from react-compat\n// https://github.com/developit/preact-compat/blob/7c5de00e7c85e2ffd011bf3af02899b63f699d3a/src/index.js#L349\nfunction shallowDiffers(a, b) {\n for (var i in a) {\n if (!(i in b)) return true;\n }for (var _i in b) {\n if (a[_i] !== b[_i]) return true;\n }return false;\n}\n\nexport default (function (instance, nextProps, nextState) {\n return shallowDiffers(instance.props, nextProps) || shallowDiffers(instance.state, nextState);\n});","import React from \"react\"\nimport loader, { PageResourceStatus } from \"./loader\"\nimport shallowCompare from \"shallow-compare\"\n\nclass EnsureResources extends React.Component {\n constructor(props) {\n super()\n const { location, pageResources } = props\n this.state = {\n location: { ...location },\n pageResources:\n pageResources ||\n loader.loadPageSync(location.pathname + location.search, {\n withErrorDetails: true,\n }),\n }\n }\n\n static getDerivedStateFromProps({ location }, prevState) {\n if (prevState.location.href !== location.href) {\n const pageResources = loader.loadPageSync(\n location.pathname + location.search,\n {\n withErrorDetails: true,\n }\n )\n\n return {\n pageResources,\n location: { ...location },\n }\n }\n\n return {\n location: { ...location },\n }\n }\n\n loadResources(rawPath) {\n loader.loadPage(rawPath).then(pageResources => {\n if (pageResources && pageResources.status !== PageResourceStatus.Error) {\n this.setState({\n location: { ...window.location },\n pageResources,\n })\n } else {\n window.history.replaceState({}, ``, location.href)\n window.location = rawPath\n }\n })\n }\n\n shouldComponentUpdate(nextProps, nextState) {\n // Always return false if we're missing resources.\n if (!nextState.pageResources) {\n this.loadResources(\n nextProps.location.pathname + nextProps.location.search\n )\n return false\n }\n\n if (\n process.env.BUILD_STAGE === `develop` &&\n nextState.pageResources.stale\n ) {\n this.loadResources(\n nextProps.location.pathname + nextProps.location.search\n )\n return false\n }\n\n // Check if the component or json have changed.\n if (this.state.pageResources !== nextState.pageResources) {\n return true\n }\n if (\n this.state.pageResources.component !== nextState.pageResources.component\n ) {\n return true\n }\n\n if (this.state.pageResources.json !== nextState.pageResources.json) {\n return true\n }\n // Check if location has changed on a page using internal routing\n // via matchPath configuration.\n if (\n this.state.location.key !== nextState.location.key &&\n nextState.pageResources.page &&\n (nextState.pageResources.page.matchPath ||\n nextState.pageResources.page.path)\n ) {\n return true\n }\n return shallowCompare(this, nextProps, nextState)\n }\n\n render() {\n if (\n process.env.NODE_ENV !== `production` &&\n (!this.state.pageResources ||\n this.state.pageResources.status === PageResourceStatus.Error)\n ) {\n const message = `EnsureResources was not able to find resources for path: \"${this.props.location.pathname}\"\nThis typically means that an issue occurred building components for that path.\nRun \\`gatsby clean\\` to remove any cached elements.`\n if (this.state.pageResources?.error) {\n console.error(message)\n throw this.state.pageResources.error\n }\n\n throw new Error(message)\n }\n\n return this.props.children(this.state)\n }\n}\n\nexport default EnsureResources\n","import { apiRunner, apiRunnerAsync } from \"./api-runner-browser\"\nimport React from \"react\"\nimport { Router, navigate, Location, BaseContext } from \"@gatsbyjs/reach-router\"\nimport { ScrollContext } from \"gatsby-react-router-scroll\"\nimport { StaticQueryContext } from \"./static-query\"\nimport {\n SlicesMapContext,\n SlicesContext,\n SlicesResultsContext,\n} from \"./slice/context\"\nimport {\n shouldUpdateScroll,\n init as navigationInit,\n RouteUpdates,\n} from \"./navigation\"\nimport emitter from \"./emitter\"\nimport PageRenderer from \"./page-renderer\"\nimport asyncRequires from \"$virtual/async-requires\"\nimport {\n setLoader,\n ProdLoader,\n publicLoader,\n PageResourceStatus,\n getStaticQueryResults,\n getSliceResults,\n} from \"./loader\"\nimport EnsureResources from \"./ensure-resources\"\nimport stripPrefix from \"./strip-prefix\"\n\n// Generated during bootstrap\nimport matchPaths from \"$virtual/match-paths.json\"\nimport { reactDOMUtils } from \"./react-dom-utils\"\n\nconst loader = new ProdLoader(asyncRequires, matchPaths, window.pageData)\nsetLoader(loader)\nloader.setApiRunner(apiRunner)\n\nconst { render, hydrate } = reactDOMUtils()\n\nwindow.asyncRequires = asyncRequires\nwindow.___emitter = emitter\nwindow.___loader = publicLoader\n\nnavigationInit()\n\nconst reloadStorageKey = `gatsby-reload-compilation-hash-match`\n\napiRunnerAsync(`onClientEntry`).then(() => {\n // Let plugins register a service worker. The plugin just needs\n // to return true.\n if (apiRunner(`registerServiceWorker`).filter(Boolean).length > 0) {\n require(`./register-service-worker`)\n }\n\n // In gatsby v2 if Router is used in page using matchPaths\n // paths need to contain full path.\n // For example:\n // - page have `/app/*` matchPath\n // - inside template user needs to use `/app/xyz` as path\n // Resetting `basepath`/`baseuri` keeps current behaviour\n // to not introduce breaking change.\n // Remove this in v3\n const RouteHandler = props => (\n
\n \n \n )\n\n const DataContext = React.createContext({})\n\n const slicesContext = {\n renderEnvironment: `browser`,\n }\n\n class GatsbyRoot extends React.Component {\n render() {\n const { children } = this.props\n return (\n
\n {({ location }) => (\n \n {({ pageResources, location }) => {\n const staticQueryResults = getStaticQueryResults()\n const sliceResults = getSliceResults()\n\n return (\n \n \n \n \n \n {children}\n \n \n \n \n \n )\n }}\n \n )}\n \n )\n }\n }\n\n class LocationHandler extends React.Component {\n render() {\n return (\n
\n {({ pageResources, location }) => (\n \n \n \n \n \n \n \n )}\n \n )\n }\n }\n\n const { pagePath, location: browserLoc } = window\n\n // Explicitly call navigate if the canonical path (window.pagePath)\n // is different to the browser path (window.location.pathname). SSR\n // page paths might include search params, while SSG and DSG won't.\n // If page path include search params we also compare query params.\n // But only if NONE of the following conditions hold:\n //\n // - The url matches a client side route (page.matchPath)\n // - it's a 404 page\n // - it's the offline plugin shell (/offline-plugin-app-shell-fallback/)\n if (\n pagePath &&\n __BASE_PATH__ + pagePath !==\n browserLoc.pathname + (pagePath.includes(`?`) ? browserLoc.search : ``) &&\n !(\n loader.findMatchPath(stripPrefix(browserLoc.pathname, __BASE_PATH__)) ||\n pagePath.match(/^\\/(404|500)(\\/?|.html)$/) ||\n pagePath.match(/^\\/offline-plugin-app-shell-fallback\\/?$/)\n )\n ) {\n navigate(\n __BASE_PATH__ +\n pagePath +\n (!pagePath.includes(`?`) ? browserLoc.search : ``) +\n browserLoc.hash,\n {\n replace: true,\n }\n )\n }\n\n // It's possible that sessionStorage can throw an exception if access is not granted, see https://github.com/gatsbyjs/gatsby/issues/34512\n const getSessionStorage = () => {\n try {\n return sessionStorage\n } catch {\n return null\n }\n }\n\n publicLoader.loadPage(browserLoc.pathname + browserLoc.search).then(page => {\n const sessionStorage = getSessionStorage()\n\n if (\n page?.page?.webpackCompilationHash &&\n page.page.webpackCompilationHash !== window.___webpackCompilationHash\n ) {\n // Purge plugin-offline cache\n if (\n `serviceWorker` in navigator &&\n navigator.serviceWorker.controller !== null &&\n navigator.serviceWorker.controller.state === `activated`\n ) {\n navigator.serviceWorker.controller.postMessage({\n gatsbyApi: `clearPathResources`,\n })\n }\n\n // We have not matching html + js (inlined `window.___webpackCompilationHash`)\n // with our data (coming from `app-data.json` file). This can cause issues such as\n // errors trying to load static queries (as list of static queries is inside `page-data`\n // which might not match to currently loaded `.js` scripts).\n // We are making attempt to reload if hashes don't match, but we also have to handle case\n // when reload doesn't fix it (possibly broken deploy) so we don't end up in infinite reload loop\n if (sessionStorage) {\n const isReloaded = sessionStorage.getItem(reloadStorageKey) === `1`\n\n if (!isReloaded) {\n sessionStorage.setItem(reloadStorageKey, `1`)\n window.location.reload(true)\n return\n }\n }\n }\n\n if (sessionStorage) {\n sessionStorage.removeItem(reloadStorageKey)\n }\n\n if (!page || page.status === PageResourceStatus.Error) {\n const message = `page resources for ${browserLoc.pathname} not found. Not rendering React`\n\n // if the chunk throws an error we want to capture the real error\n // This should help with https://github.com/gatsbyjs/gatsby/issues/19618\n if (page && page.error) {\n console.error(message)\n throw page.error\n }\n\n throw new Error(message)\n }\n\n const SiteRoot = apiRunner(\n `wrapRootElement`,\n { element:
},\n
,\n ({ result }) => {\n return { element: result }\n }\n ).pop()\n\n const App = function App() {\n const onClientEntryRanRef = React.useRef(false)\n\n React.useEffect(() => {\n if (!onClientEntryRanRef.current) {\n onClientEntryRanRef.current = true\n if (performance.mark) {\n performance.mark(`onInitialClientRender`)\n }\n\n apiRunner(`onInitialClientRender`)\n }\n }, [])\n\n return
{SiteRoot}\n }\n\n const focusEl = document.getElementById(`gatsby-focus-wrapper`)\n\n // Client only pages have any empty body so we just do a normal\n // render to avoid React complaining about hydration mis-matches.\n let defaultRenderer = render\n if (focusEl && focusEl.children.length) {\n defaultRenderer = hydrate\n }\n\n const renderer = apiRunner(\n `replaceHydrateFunction`,\n undefined,\n defaultRenderer\n )[0]\n\n function runRender() {\n const rootElement =\n typeof window !== `undefined`\n ? document.getElementById(`___gatsby`)\n : null\n\n renderer(
, rootElement)\n }\n\n // https://github.com/madrobby/zepto/blob/b5ed8d607f67724788ec9ff492be297f64d47dfc/src/zepto.js#L439-L450\n // TODO remove IE 10 support\n const doc = document\n if (\n doc.readyState === `complete` ||\n (doc.readyState !== `loading` && !doc.documentElement.doScroll)\n ) {\n setTimeout(function () {\n runRender()\n }, 0)\n } else {\n const handler = function () {\n doc.removeEventListener(`DOMContentLoaded`, handler, false)\n window.removeEventListener(`load`, handler, false)\n\n runRender()\n }\n\n doc.addEventListener(`DOMContentLoaded`, handler, false)\n window.addEventListener(`load`, handler, false)\n }\n\n return\n })\n})\n","import React from \"react\"\nimport PropTypes from \"prop-types\"\n\nimport loader from \"./loader\"\nimport InternalPageRenderer from \"./page-renderer\"\n\nconst ProdPageRenderer = ({ location }) => {\n const pageResources = loader.loadPageSync(location.pathname)\n if (!pageResources) {\n return null\n }\n return React.createElement(InternalPageRenderer, {\n location,\n pageResources,\n ...pageResources.json,\n })\n}\n\nProdPageRenderer.propTypes = {\n location: PropTypes.shape({\n pathname: PropTypes.string.isRequired,\n }).isRequired,\n}\n\nexport default ProdPageRenderer\n","const preferDefault = m => (m && m.default) || m\n\nif (process.env.BUILD_STAGE === `develop`) {\n module.exports = preferDefault(require(`./public-page-renderer-dev`))\n} else if (process.env.BUILD_STAGE === `build-javascript`) {\n module.exports = preferDefault(require(`./public-page-renderer-prod`))\n} else {\n module.exports = () => null\n}\n","const map = new WeakMap()\n\nexport function reactDOMUtils() {\n const reactDomClient = require(`react-dom/client`)\n\n const render = (Component, el) => {\n let root = map.get(el)\n if (!root) {\n map.set(el, (root = reactDomClient.createRoot(el)))\n }\n root.render(Component)\n }\n\n const hydrate = (Component, el) => reactDomClient.hydrateRoot(el, Component)\n\n return { render, hydrate }\n}\n","import redirects from \"./redirects.json\"\n\n// Convert to a map for faster lookup in maybeRedirect()\n\nconst redirectMap = new Map()\nconst redirectIgnoreCaseMap = new Map()\n\nredirects.forEach(redirect => {\n if (redirect.ignoreCase) {\n redirectIgnoreCaseMap.set(redirect.fromPath, redirect)\n } else {\n redirectMap.set(redirect.fromPath, redirect)\n }\n})\n\nexport function maybeGetBrowserRedirect(pathname) {\n let redirect = redirectMap.get(pathname)\n if (!redirect) {\n redirect = redirectIgnoreCaseMap.get(pathname.toLowerCase())\n }\n return redirect\n}\n","import { apiRunner } from \"./api-runner-browser\"\n\nif (\n window.location.protocol !== `https:` &&\n window.location.hostname !== `localhost`\n) {\n console.error(\n `Service workers can only be used over HTTPS, or on localhost for development`\n )\n} else if (`serviceWorker` in navigator) {\n navigator.serviceWorker\n .register(`${__BASE_PATH__}/sw.js`)\n .then(function (reg) {\n reg.addEventListener(`updatefound`, () => {\n apiRunner(`onServiceWorkerUpdateFound`, { serviceWorker: reg })\n // The updatefound event implies that reg.installing is set; see\n // https://w3c.github.io/ServiceWorker/#service-worker-registration-updatefound-event\n const installingWorker = reg.installing\n console.log(`installingWorker`, installingWorker)\n installingWorker.addEventListener(`statechange`, () => {\n switch (installingWorker.state) {\n case `installed`:\n if (navigator.serviceWorker.controller) {\n // At this point, the old content will have been purged and the fresh content will\n // have been added to the cache.\n\n // We set a flag so Gatsby Link knows to refresh the page on next navigation attempt\n window.___swUpdated = true\n // We call the onServiceWorkerUpdateReady API so users can show update prompts.\n apiRunner(`onServiceWorkerUpdateReady`, { serviceWorker: reg })\n\n // If resources failed for the current page, reload.\n if (window.___failedResources) {\n console.log(`resources failed, SW updated - reloading`)\n window.location.reload()\n }\n } else {\n // At this point, everything has been precached.\n // It's the perfect time to display a \"Content is cached for offline use.\" message.\n console.log(`Content is now available offline!`)\n\n // Post to service worker that install is complete.\n // Delay to allow time for the event listener to be added --\n // otherwise fetch is called too soon and resources aren't cached.\n apiRunner(`onServiceWorkerInstalled`, { serviceWorker: reg })\n }\n break\n\n case `redundant`:\n console.error(`The installing service worker became redundant.`)\n apiRunner(`onServiceWorkerRedundant`, { serviceWorker: reg })\n break\n\n case `activated`:\n apiRunner(`onServiceWorkerActive`, { serviceWorker: reg })\n break\n }\n })\n })\n })\n .catch(function (e) {\n console.error(`Error during service worker registration:`, e)\n })\n}\n","import React from \"react\"\n\nconst SlicesResultsContext = React.createContext({})\nconst SlicesContext = React.createContext({})\nconst SlicesMapContext = React.createContext({})\nconst SlicesPropsContext = React.createContext({})\n\nexport {\n SlicesResultsContext,\n SlicesContext,\n SlicesMapContext,\n SlicesPropsContext,\n}\n","import React from \"react\"\nimport PropTypes from \"prop-types\"\nimport { createServerOrClientContext } from \"./context-utils\"\n\nconst StaticQueryContext = createServerOrClientContext(`StaticQuery`, {})\n\nfunction StaticQueryDataRenderer({ staticQueryData, data, query, render }) {\n const finalData = data\n ? data.data\n : staticQueryData[query] && staticQueryData[query].data\n\n return (\n
\n {finalData && render(finalData)}\n {!finalData && Loading (StaticQuery)
}\n \n )\n}\n\nlet warnedAboutStaticQuery = false\n\n// TODO(v6): Remove completely\nconst StaticQuery = props => {\n const { data, query, render, children } = props\n\n if (process.env.NODE_ENV === `development` && !warnedAboutStaticQuery) {\n console.warn(\n `The
component is deprecated and will be removed in Gatsby v6. Use useStaticQuery instead. Refer to the migration guide for more information: https://gatsby.dev/migrating-4-to-5/#staticquery--is-deprecated`\n )\n warnedAboutStaticQuery = true\n }\n\n return (\n
\n {staticQueryData => (\n \n )}\n \n )\n}\n\nStaticQuery.propTypes = {\n data: PropTypes.object,\n query: PropTypes.string.isRequired,\n render: PropTypes.func,\n children: PropTypes.func,\n}\n\nconst useStaticQuery = query => {\n if (\n typeof React.useContext !== `function` &&\n process.env.NODE_ENV === `development`\n ) {\n // TODO(v5): Remove since we require React >= 18\n throw new Error(\n `You're likely using a version of React that doesn't support Hooks\\n` +\n `Please update React and ReactDOM to 16.8.0 or later to use the useStaticQuery hook.`\n )\n }\n\n const context = React.useContext(StaticQueryContext)\n\n // query is a stringified number like `3303882` when wrapped with graphql, If a user forgets\n // to wrap the query in a grqphql, then casting it to a Number results in `NaN` allowing us to\n // catch the misuse of the API and give proper direction\n if (isNaN(Number(query))) {\n throw new Error(`useStaticQuery was called with a string but expects to be called using \\`graphql\\`. Try this:\n\nimport { useStaticQuery, graphql } from 'gatsby';\n\nuseStaticQuery(graphql\\`${query}\\`);\n`)\n }\n\n if (context[query]?.data) {\n return context[query].data\n } else {\n throw new Error(\n `The result of this StaticQuery could not be fetched.\\n\\n` +\n `This is likely a bug in Gatsby and if refreshing the page does not fix it, ` +\n `please open an issue in https://github.com/gatsbyjs/gatsby/issues`\n )\n }\n}\n\nexport { StaticQuery, StaticQueryContext, useStaticQuery }\n","import React from \"react\"\n\n// Ensure serverContext is not created more than once as React will throw when creating it more than once\n// https://github.com/facebook/react/blob/dd2d6522754f52c70d02c51db25eb7cbd5d1c8eb/packages/react/src/ReactServerContext.js#L101\nconst createServerContext = (name, defaultValue = null) => {\n /* eslint-disable no-undef */\n if (!globalThis.__SERVER_CONTEXT) {\n globalThis.__SERVER_CONTEXT = {}\n }\n\n if (!globalThis.__SERVER_CONTEXT[name]) {\n globalThis.__SERVER_CONTEXT[name] = React.createServerContext(\n name,\n defaultValue\n )\n }\n\n return globalThis.__SERVER_CONTEXT[name]\n}\n\nfunction createServerOrClientContext(name, defaultValue) {\n if (React.createServerContext) {\n return createServerContext(name, defaultValue)\n }\n\n return React.createContext(defaultValue)\n}\n\nexport { createServerOrClientContext }\n","/**\n * Remove a prefix from a string. Return the input string if the given prefix\n * isn't found.\n */\n\nexport default function stripPrefix(str, prefix = ``) {\n if (!prefix) {\n return str\n }\n\n if (str === prefix) {\n return `/`\n }\n\n if (str.startsWith(`${prefix}/`)) {\n return str.slice(prefix.length)\n }\n\n return str\n}\n","import { __assign } from \"tslib\";\nimport { invariant } from \"../../utilities/globals/index.js\";\nimport * as React from \"rehackt\";\nimport { getApolloContext } from \"./ApolloContext.js\";\nexport var ApolloProvider = function (_a) {\n var client = _a.client, children = _a.children;\n var ApolloContext = getApolloContext();\n var parentContext = React.useContext(ApolloContext);\n var context = React.useMemo(function () {\n return __assign(__assign({}, parentContext), { client: client || parentContext.client });\n }, [parentContext, client]);\n invariant(context.client, 47);\n return (React.createElement(ApolloContext.Provider, { value: context }, children));\n};\n//# sourceMappingURL=ApolloProvider.js.map","function _createForOfIteratorHelperLoose(o, allowArrayLike) { var it = typeof Symbol !== \"undefined\" && o[Symbol.iterator] || o[\"@@iterator\"]; if (it) return (it = it.call(o)).next.bind(it); if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === \"number\") { if (it) o = it; var i = 0; return function () { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }; } throw new TypeError(\"Invalid attempt to iterate non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.\"); }\n\nfunction _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === \"string\") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === \"Object\" && o.constructor) n = o.constructor.name; if (n === \"Map\" || n === \"Set\") return Array.from(o); if (n === \"Arguments\" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }\n\nfunction _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, \"prototype\", { writable: false }); return Constructor; }\n\n// === Symbol Support ===\nvar hasSymbols = function () {\n return typeof Symbol === 'function';\n};\n\nvar hasSymbol = function (name) {\n return hasSymbols() && Boolean(Symbol[name]);\n};\n\nvar getSymbol = function (name) {\n return hasSymbol(name) ? Symbol[name] : '@@' + name;\n};\n\nif (hasSymbols() && !hasSymbol('observable')) {\n Symbol.observable = Symbol('observable');\n}\n\nvar SymbolIterator = getSymbol('iterator');\nvar SymbolObservable = getSymbol('observable');\nvar SymbolSpecies = getSymbol('species'); // === Abstract Operations ===\n\nfunction getMethod(obj, key) {\n var value = obj[key];\n if (value == null) return undefined;\n if (typeof value !== 'function') throw new TypeError(value + ' is not a function');\n return value;\n}\n\nfunction getSpecies(obj) {\n var ctor = obj.constructor;\n\n if (ctor !== undefined) {\n ctor = ctor[SymbolSpecies];\n\n if (ctor === null) {\n ctor = undefined;\n }\n }\n\n return ctor !== undefined ? ctor : Observable;\n}\n\nfunction isObservable(x) {\n return x instanceof Observable; // SPEC: Brand check\n}\n\nfunction hostReportError(e) {\n if (hostReportError.log) {\n hostReportError.log(e);\n } else {\n setTimeout(function () {\n throw e;\n });\n }\n}\n\nfunction enqueue(fn) {\n Promise.resolve().then(function () {\n try {\n fn();\n } catch (e) {\n hostReportError(e);\n }\n });\n}\n\nfunction cleanupSubscription(subscription) {\n var cleanup = subscription._cleanup;\n if (cleanup === undefined) return;\n subscription._cleanup = undefined;\n\n if (!cleanup) {\n return;\n }\n\n try {\n if (typeof cleanup === 'function') {\n cleanup();\n } else {\n var unsubscribe = getMethod(cleanup, 'unsubscribe');\n\n if (unsubscribe) {\n unsubscribe.call(cleanup);\n }\n }\n } catch (e) {\n hostReportError(e);\n }\n}\n\nfunction closeSubscription(subscription) {\n subscription._observer = undefined;\n subscription._queue = undefined;\n subscription._state = 'closed';\n}\n\nfunction flushSubscription(subscription) {\n var queue = subscription._queue;\n\n if (!queue) {\n return;\n }\n\n subscription._queue = undefined;\n subscription._state = 'ready';\n\n for (var i = 0; i < queue.length; ++i) {\n notifySubscription(subscription, queue[i].type, queue[i].value);\n if (subscription._state === 'closed') break;\n }\n}\n\nfunction notifySubscription(subscription, type, value) {\n subscription._state = 'running';\n var observer = subscription._observer;\n\n try {\n var m = getMethod(observer, type);\n\n switch (type) {\n case 'next':\n if (m) m.call(observer, value);\n break;\n\n case 'error':\n closeSubscription(subscription);\n if (m) m.call(observer, value);else throw value;\n break;\n\n case 'complete':\n closeSubscription(subscription);\n if (m) m.call(observer);\n break;\n }\n } catch (e) {\n hostReportError(e);\n }\n\n if (subscription._state === 'closed') cleanupSubscription(subscription);else if (subscription._state === 'running') subscription._state = 'ready';\n}\n\nfunction onNotify(subscription, type, value) {\n if (subscription._state === 'closed') return;\n\n if (subscription._state === 'buffering') {\n subscription._queue.push({\n type: type,\n value: value\n });\n\n return;\n }\n\n if (subscription._state !== 'ready') {\n subscription._state = 'buffering';\n subscription._queue = [{\n type: type,\n value: value\n }];\n enqueue(function () {\n return flushSubscription(subscription);\n });\n return;\n }\n\n notifySubscription(subscription, type, value);\n}\n\nvar Subscription = /*#__PURE__*/function () {\n function Subscription(observer, subscriber) {\n // ASSERT: observer is an object\n // ASSERT: subscriber is callable\n this._cleanup = undefined;\n this._observer = observer;\n this._queue = undefined;\n this._state = 'initializing';\n var subscriptionObserver = new SubscriptionObserver(this);\n\n try {\n this._cleanup = subscriber.call(undefined, subscriptionObserver);\n } catch (e) {\n subscriptionObserver.error(e);\n }\n\n if (this._state === 'initializing') this._state = 'ready';\n }\n\n var _proto = Subscription.prototype;\n\n _proto.unsubscribe = function unsubscribe() {\n if (this._state !== 'closed') {\n closeSubscription(this);\n cleanupSubscription(this);\n }\n };\n\n _createClass(Subscription, [{\n key: \"closed\",\n get: function () {\n return this._state === 'closed';\n }\n }]);\n\n return Subscription;\n}();\n\nvar SubscriptionObserver = /*#__PURE__*/function () {\n function SubscriptionObserver(subscription) {\n this._subscription = subscription;\n }\n\n var _proto2 = SubscriptionObserver.prototype;\n\n _proto2.next = function next(value) {\n onNotify(this._subscription, 'next', value);\n };\n\n _proto2.error = function error(value) {\n onNotify(this._subscription, 'error', value);\n };\n\n _proto2.complete = function complete() {\n onNotify(this._subscription, 'complete');\n };\n\n _createClass(SubscriptionObserver, [{\n key: \"closed\",\n get: function () {\n return this._subscription._state === 'closed';\n }\n }]);\n\n return SubscriptionObserver;\n}();\n\nvar Observable = /*#__PURE__*/function () {\n function Observable(subscriber) {\n if (!(this instanceof Observable)) throw new TypeError('Observable cannot be called as a function');\n if (typeof subscriber !== 'function') throw new TypeError('Observable initializer must be a function');\n this._subscriber = subscriber;\n }\n\n var _proto3 = Observable.prototype;\n\n _proto3.subscribe = function subscribe(observer) {\n if (typeof observer !== 'object' || observer === null) {\n observer = {\n next: observer,\n error: arguments[1],\n complete: arguments[2]\n };\n }\n\n return new Subscription(observer, this._subscriber);\n };\n\n _proto3.forEach = function forEach(fn) {\n var _this = this;\n\n return new Promise(function (resolve, reject) {\n if (typeof fn !== 'function') {\n reject(new TypeError(fn + ' is not a function'));\n return;\n }\n\n function done() {\n subscription.unsubscribe();\n resolve();\n }\n\n var subscription = _this.subscribe({\n next: function (value) {\n try {\n fn(value, done);\n } catch (e) {\n reject(e);\n subscription.unsubscribe();\n }\n },\n error: reject,\n complete: resolve\n });\n });\n };\n\n _proto3.map = function map(fn) {\n var _this2 = this;\n\n if (typeof fn !== 'function') throw new TypeError(fn + ' is not a function');\n var C = getSpecies(this);\n return new C(function (observer) {\n return _this2.subscribe({\n next: function (value) {\n try {\n value = fn(value);\n } catch (e) {\n return observer.error(e);\n }\n\n observer.next(value);\n },\n error: function (e) {\n observer.error(e);\n },\n complete: function () {\n observer.complete();\n }\n });\n });\n };\n\n _proto3.filter = function filter(fn) {\n var _this3 = this;\n\n if (typeof fn !== 'function') throw new TypeError(fn + ' is not a function');\n var C = getSpecies(this);\n return new C(function (observer) {\n return _this3.subscribe({\n next: function (value) {\n try {\n if (!fn(value)) return;\n } catch (e) {\n return observer.error(e);\n }\n\n observer.next(value);\n },\n error: function (e) {\n observer.error(e);\n },\n complete: function () {\n observer.complete();\n }\n });\n });\n };\n\n _proto3.reduce = function reduce(fn) {\n var _this4 = this;\n\n if (typeof fn !== 'function') throw new TypeError(fn + ' is not a function');\n var C = getSpecies(this);\n var hasSeed = arguments.length > 1;\n var hasValue = false;\n var seed = arguments[1];\n var acc = seed;\n return new C(function (observer) {\n return _this4.subscribe({\n next: function (value) {\n var first = !hasValue;\n hasValue = true;\n\n if (!first || hasSeed) {\n try {\n acc = fn(acc, value);\n } catch (e) {\n return observer.error(e);\n }\n } else {\n acc = value;\n }\n },\n error: function (e) {\n observer.error(e);\n },\n complete: function () {\n if (!hasValue && !hasSeed) return observer.error(new TypeError('Cannot reduce an empty sequence'));\n observer.next(acc);\n observer.complete();\n }\n });\n });\n };\n\n _proto3.concat = function concat() {\n var _this5 = this;\n\n for (var _len = arguments.length, sources = new Array(_len), _key = 0; _key < _len; _key++) {\n sources[_key] = arguments[_key];\n }\n\n var C = getSpecies(this);\n return new C(function (observer) {\n var subscription;\n var index = 0;\n\n function startNext(next) {\n subscription = next.subscribe({\n next: function (v) {\n observer.next(v);\n },\n error: function (e) {\n observer.error(e);\n },\n complete: function () {\n if (index === sources.length) {\n subscription = undefined;\n observer.complete();\n } else {\n startNext(C.from(sources[index++]));\n }\n }\n });\n }\n\n startNext(_this5);\n return function () {\n if (subscription) {\n subscription.unsubscribe();\n subscription = undefined;\n }\n };\n });\n };\n\n _proto3.flatMap = function flatMap(fn) {\n var _this6 = this;\n\n if (typeof fn !== 'function') throw new TypeError(fn + ' is not a function');\n var C = getSpecies(this);\n return new C(function (observer) {\n var subscriptions = [];\n\n var outer = _this6.subscribe({\n next: function (value) {\n if (fn) {\n try {\n value = fn(value);\n } catch (e) {\n return observer.error(e);\n }\n }\n\n var inner = C.from(value).subscribe({\n next: function (value) {\n observer.next(value);\n },\n error: function (e) {\n observer.error(e);\n },\n complete: function () {\n var i = subscriptions.indexOf(inner);\n if (i >= 0) subscriptions.splice(i, 1);\n completeIfDone();\n }\n });\n subscriptions.push(inner);\n },\n error: function (e) {\n observer.error(e);\n },\n complete: function () {\n completeIfDone();\n }\n });\n\n function completeIfDone() {\n if (outer.closed && subscriptions.length === 0) observer.complete();\n }\n\n return function () {\n subscriptions.forEach(function (s) {\n return s.unsubscribe();\n });\n outer.unsubscribe();\n };\n });\n };\n\n _proto3[SymbolObservable] = function () {\n return this;\n };\n\n Observable.from = function from(x) {\n var C = typeof this === 'function' ? this : Observable;\n if (x == null) throw new TypeError(x + ' is not an object');\n var method = getMethod(x, SymbolObservable);\n\n if (method) {\n var observable = method.call(x);\n if (Object(observable) !== observable) throw new TypeError(observable + ' is not an object');\n if (isObservable(observable) && observable.constructor === C) return observable;\n return new C(function (observer) {\n return observable.subscribe(observer);\n });\n }\n\n if (hasSymbol('iterator')) {\n method = getMethod(x, SymbolIterator);\n\n if (method) {\n return new C(function (observer) {\n enqueue(function () {\n if (observer.closed) return;\n\n for (var _iterator = _createForOfIteratorHelperLoose(method.call(x)), _step; !(_step = _iterator()).done;) {\n var item = _step.value;\n observer.next(item);\n if (observer.closed) return;\n }\n\n observer.complete();\n });\n });\n }\n }\n\n if (Array.isArray(x)) {\n return new C(function (observer) {\n enqueue(function () {\n if (observer.closed) return;\n\n for (var i = 0; i < x.length; ++i) {\n observer.next(x[i]);\n if (observer.closed) return;\n }\n\n observer.complete();\n });\n });\n }\n\n throw new TypeError(x + ' is not observable');\n };\n\n Observable.of = function of() {\n for (var _len2 = arguments.length, items = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {\n items[_key2] = arguments[_key2];\n }\n\n var C = typeof this === 'function' ? this : Observable;\n return new C(function (observer) {\n enqueue(function () {\n if (observer.closed) return;\n\n for (var i = 0; i < items.length; ++i) {\n observer.next(items[i]);\n if (observer.closed) return;\n }\n\n observer.complete();\n });\n });\n };\n\n _createClass(Observable, null, [{\n key: SymbolSpecies,\n get: function () {\n return this;\n }\n }]);\n\n return Observable;\n}();\n\nif (hasSymbols()) {\n Object.defineProperty(Observable, Symbol('extensions'), {\n value: {\n symbol: SymbolObservable,\n hostReportError: hostReportError\n },\n configurable: true\n });\n}\n\nexport { Observable };\n","import { __assign, __spreadArray } from \"tslib\";\nimport { invariant, newInvariantError } from \"../globals/index.js\";\n/**\n * Returns a query document which adds a single query operation that only\n * spreads the target fragment inside of it.\n *\n * So for example a document of:\n *\n * ```graphql\n * fragment foo on Foo { a b c }\n * ```\n *\n * Turns into:\n *\n * ```graphql\n * { ...foo }\n *\n * fragment foo on Foo { a b c }\n * ```\n *\n * The target fragment will either be the only fragment in the document, or a\n * fragment specified by the provided `fragmentName`. If there is more than one\n * fragment, but a `fragmentName` was not defined then an error will be thrown.\n */\nexport function getFragmentQueryDocument(document, fragmentName) {\n var actualFragmentName = fragmentName;\n // Build an array of all our fragment definitions that will be used for\n // validations. We also do some validations on the other definitions in the\n // document while building this list.\n var fragments = [];\n document.definitions.forEach(function (definition) {\n // Throw an error if we encounter an operation definition because we will\n // define our own operation definition later on.\n if (definition.kind === \"OperationDefinition\") {\n throw newInvariantError(\n 74,\n definition.operation,\n definition.name ? \" named '\".concat(definition.name.value, \"'\") : \"\"\n );\n }\n // Add our definition to the fragments array if it is a fragment\n // definition.\n if (definition.kind === \"FragmentDefinition\") {\n fragments.push(definition);\n }\n });\n // If the user did not give us a fragment name then let us try to get a\n // name from a single fragment in the definition.\n if (typeof actualFragmentName === \"undefined\") {\n invariant(fragments.length === 1, 75, fragments.length);\n actualFragmentName = fragments[0].name.value;\n }\n // Generate a query document with an operation that simply spreads the\n // fragment inside of it.\n var query = __assign(__assign({}, document), { definitions: __spreadArray([\n {\n kind: \"OperationDefinition\",\n // OperationTypeNode is an enum\n operation: \"query\",\n selectionSet: {\n kind: \"SelectionSet\",\n selections: [\n {\n kind: \"FragmentSpread\",\n name: {\n kind: \"Name\",\n value: actualFragmentName,\n },\n },\n ],\n },\n }\n ], document.definitions, true) });\n return query;\n}\n// Utility function that takes a list of fragment definitions and makes a hash out of them\n// that maps the name of the fragment to the fragment definition.\nexport function createFragmentMap(fragments) {\n if (fragments === void 0) { fragments = []; }\n var symTable = {};\n fragments.forEach(function (fragment) {\n symTable[fragment.name.value] = fragment;\n });\n return symTable;\n}\nexport function getFragmentFromSelection(selection, fragmentMap) {\n switch (selection.kind) {\n case \"InlineFragment\":\n return selection;\n case \"FragmentSpread\": {\n var fragmentName = selection.name.value;\n if (typeof fragmentMap === \"function\") {\n return fragmentMap(fragmentName);\n }\n var fragment = fragmentMap && fragmentMap[fragmentName];\n invariant(fragment, 76, fragmentName);\n return fragment || null;\n }\n default:\n return null;\n }\n}\n//# sourceMappingURL=fragments.js.map","import { AutoCleanedStrongCache, cacheSizes, } from \"../../utilities/caching/index.js\";\nimport { registerGlobalCache } from \"../caching/getMemoryInternals.js\";\n/**\n * Like JSON.stringify, but with object keys always sorted in the same order.\n *\n * To achieve performant sorting, this function uses a Map from JSON-serialized\n * arrays of keys (in any order) to sorted arrays of the same keys, with a\n * single sorted array reference shared by all permutations of the keys.\n *\n * As a drawback, this function will add a little bit more memory for every\n * object encountered that has different (more, less, a different order of) keys\n * than in the past.\n *\n * In a typical application, this extra memory usage should not play a\n * significant role, as `canonicalStringify` will be called for only a limited\n * number of object shapes, and the cache will not grow beyond a certain point.\n * But in some edge cases, this could be a problem, so we provide\n * canonicalStringify.reset() as a way of clearing the cache.\n * */\nexport var canonicalStringify = Object.assign(function canonicalStringify(value) {\n return JSON.stringify(value, stableObjectReplacer);\n}, {\n reset: function () {\n // Clearing the sortingMap will reclaim all cached memory, without\n // affecting the logical results of canonicalStringify, but potentially\n // sacrificing performance until the cache is refilled.\n sortingMap = new AutoCleanedStrongCache(cacheSizes.canonicalStringify || 1000 /* defaultCacheSizes.canonicalStringify */);\n },\n});\nif (globalThis.__DEV__ !== false) {\n registerGlobalCache(\"canonicalStringify\", function () { return sortingMap.size; });\n}\n// Values are JSON-serialized arrays of object keys (in any order), and values\n// are sorted arrays of the same keys.\nvar sortingMap;\ncanonicalStringify.reset();\n// The JSON.stringify function takes an optional second argument called a\n// replacer function. This function is called for each key-value pair in the\n// object being stringified, and its return value is used instead of the\n// original value. If the replacer function returns a new value, that value is\n// stringified as JSON instead of the original value of the property.\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#the_replacer_parameter\nfunction stableObjectReplacer(key, value) {\n if (value && typeof value === \"object\") {\n var proto = Object.getPrototypeOf(value);\n // We don't want to mess with objects that are not \"plain\" objects, which\n // means their prototype is either Object.prototype or null. This check also\n // prevents needlessly rearranging the indices of arrays.\n if (proto === Object.prototype || proto === null) {\n var keys = Object.keys(value);\n // If keys is already sorted, let JSON.stringify serialize the original\n // value instead of creating a new object with keys in the same order.\n if (keys.every(everyKeyInOrder))\n return value;\n var unsortedKey = JSON.stringify(keys);\n var sortedKeys = sortingMap.get(unsortedKey);\n if (!sortedKeys) {\n keys.sort();\n var sortedKey = JSON.stringify(keys);\n // Checking for sortedKey in the sortingMap allows us to share the same\n // sorted array reference for all permutations of the same set of keys.\n sortedKeys = sortingMap.get(sortedKey) || keys;\n sortingMap.set(unsortedKey, sortedKeys);\n sortingMap.set(sortedKey, sortedKeys);\n }\n var sortedObject_1 = Object.create(proto);\n // Reassigning the keys in sorted order will cause JSON.stringify to\n // serialize them in sorted order.\n sortedKeys.forEach(function (key) {\n sortedObject_1[key] = value[key];\n });\n return sortedObject_1;\n }\n }\n return value;\n}\n// Since everything that happens in stableObjectReplacer benefits from being as\n// efficient as possible, we use a static function as the callback for\n// keys.every in order to test if the provided keys are already sorted without\n// allocating extra memory for a callback.\nfunction everyKeyInOrder(key, i, keys) {\n return i === 0 || keys[i - 1] <= key;\n}\n//# sourceMappingURL=canonicalStringify.js.map","import { newInvariantError } from \"../globals/index.js\";\nimport { isNonNullObject } from \"../common/objects.js\";\nimport { getFragmentFromSelection } from \"./fragments.js\";\nimport { canonicalStringify } from \"../common/canonicalStringify.js\";\nexport function makeReference(id) {\n return { __ref: String(id) };\n}\nexport function isReference(obj) {\n return Boolean(obj && typeof obj === \"object\" && typeof obj.__ref === \"string\");\n}\nexport function isDocumentNode(value) {\n return (isNonNullObject(value) &&\n value.kind === \"Document\" &&\n Array.isArray(value.definitions));\n}\nfunction isStringValue(value) {\n return value.kind === \"StringValue\";\n}\nfunction isBooleanValue(value) {\n return value.kind === \"BooleanValue\";\n}\nfunction isIntValue(value) {\n return value.kind === \"IntValue\";\n}\nfunction isFloatValue(value) {\n return value.kind === \"FloatValue\";\n}\nfunction isVariable(value) {\n return value.kind === \"Variable\";\n}\nfunction isObjectValue(value) {\n return value.kind === \"ObjectValue\";\n}\nfunction isListValue(value) {\n return value.kind === \"ListValue\";\n}\nfunction isEnumValue(value) {\n return value.kind === \"EnumValue\";\n}\nfunction isNullValue(value) {\n return value.kind === \"NullValue\";\n}\nexport function valueToObjectRepresentation(argObj, name, value, variables) {\n if (isIntValue(value) || isFloatValue(value)) {\n argObj[name.value] = Number(value.value);\n }\n else if (isBooleanValue(value) || isStringValue(value)) {\n argObj[name.value] = value.value;\n }\n else if (isObjectValue(value)) {\n var nestedArgObj_1 = {};\n value.fields.map(function (obj) {\n return valueToObjectRepresentation(nestedArgObj_1, obj.name, obj.value, variables);\n });\n argObj[name.value] = nestedArgObj_1;\n }\n else if (isVariable(value)) {\n var variableValue = (variables || {})[value.name.value];\n argObj[name.value] = variableValue;\n }\n else if (isListValue(value)) {\n argObj[name.value] = value.values.map(function (listValue) {\n var nestedArgArrayObj = {};\n valueToObjectRepresentation(nestedArgArrayObj, name, listValue, variables);\n return nestedArgArrayObj[name.value];\n });\n }\n else if (isEnumValue(value)) {\n argObj[name.value] = value.value;\n }\n else if (isNullValue(value)) {\n argObj[name.value] = null;\n }\n else {\n throw newInvariantError(85, name.value, value.kind);\n }\n}\nexport function storeKeyNameFromField(field, variables) {\n var directivesObj = null;\n if (field.directives) {\n directivesObj = {};\n field.directives.forEach(function (directive) {\n directivesObj[directive.name.value] = {};\n if (directive.arguments) {\n directive.arguments.forEach(function (_a) {\n var name = _a.name, value = _a.value;\n return valueToObjectRepresentation(directivesObj[directive.name.value], name, value, variables);\n });\n }\n });\n }\n var argObj = null;\n if (field.arguments && field.arguments.length) {\n argObj = {};\n field.arguments.forEach(function (_a) {\n var name = _a.name, value = _a.value;\n return valueToObjectRepresentation(argObj, name, value, variables);\n });\n }\n return getStoreKeyName(field.name.value, argObj, directivesObj);\n}\nvar KNOWN_DIRECTIVES = [\n \"connection\",\n \"include\",\n \"skip\",\n \"client\",\n \"rest\",\n \"export\",\n \"nonreactive\",\n];\n// Default stable JSON.stringify implementation used by getStoreKeyName. Can be\n// updated/replaced with something better by calling\n// getStoreKeyName.setStringify(newStringifyFunction).\nvar storeKeyNameStringify = canonicalStringify;\nexport var getStoreKeyName = Object.assign(function (fieldName, args, directives) {\n if (args &&\n directives &&\n directives[\"connection\"] &&\n directives[\"connection\"][\"key\"]) {\n if (directives[\"connection\"][\"filter\"] &&\n directives[\"connection\"][\"filter\"].length > 0) {\n var filterKeys = directives[\"connection\"][\"filter\"] ?\n directives[\"connection\"][\"filter\"]\n : [];\n filterKeys.sort();\n var filteredArgs_1 = {};\n filterKeys.forEach(function (key) {\n filteredArgs_1[key] = args[key];\n });\n return \"\".concat(directives[\"connection\"][\"key\"], \"(\").concat(storeKeyNameStringify(filteredArgs_1), \")\");\n }\n else {\n return directives[\"connection\"][\"key\"];\n }\n }\n var completeFieldName = fieldName;\n if (args) {\n // We can't use `JSON.stringify` here since it's non-deterministic,\n // and can lead to different store key names being created even though\n // the `args` object used during creation has the same properties/values.\n var stringifiedArgs = storeKeyNameStringify(args);\n completeFieldName += \"(\".concat(stringifiedArgs, \")\");\n }\n if (directives) {\n Object.keys(directives).forEach(function (key) {\n if (KNOWN_DIRECTIVES.indexOf(key) !== -1)\n return;\n if (directives[key] && Object.keys(directives[key]).length) {\n completeFieldName += \"@\".concat(key, \"(\").concat(storeKeyNameStringify(directives[key]), \")\");\n }\n else {\n completeFieldName += \"@\".concat(key);\n }\n });\n }\n return completeFieldName;\n}, {\n setStringify: function (s) {\n var previous = storeKeyNameStringify;\n storeKeyNameStringify = s;\n return previous;\n },\n});\nexport function argumentsObjectFromField(field, variables) {\n if (field.arguments && field.arguments.length) {\n var argObj_1 = {};\n field.arguments.forEach(function (_a) {\n var name = _a.name, value = _a.value;\n return valueToObjectRepresentation(argObj_1, name, value, variables);\n });\n return argObj_1;\n }\n return null;\n}\nexport function resultKeyNameFromField(field) {\n return field.alias ? field.alias.value : field.name.value;\n}\nexport function getTypenameFromResult(result, selectionSet, fragmentMap) {\n var fragments;\n for (var _i = 0, _a = selectionSet.selections; _i < _a.length; _i++) {\n var selection = _a[_i];\n if (isField(selection)) {\n if (selection.name.value === \"__typename\") {\n return result[resultKeyNameFromField(selection)];\n }\n }\n else if (fragments) {\n fragments.push(selection);\n }\n else {\n fragments = [selection];\n }\n }\n if (typeof result.__typename === \"string\") {\n return result.__typename;\n }\n if (fragments) {\n for (var _b = 0, fragments_1 = fragments; _b < fragments_1.length; _b++) {\n var selection = fragments_1[_b];\n var typename = getTypenameFromResult(result, getFragmentFromSelection(selection, fragmentMap).selectionSet, fragmentMap);\n if (typeof typename === \"string\") {\n return typename;\n }\n }\n }\n}\nexport function isField(selection) {\n return selection.kind === \"Field\";\n}\nexport function isInlineFragment(selection) {\n return selection.kind === \"InlineFragment\";\n}\n//# sourceMappingURL=storeUtils.js.map","import { invariant, newInvariantError } from \"../globals/index.js\";\nimport { valueToObjectRepresentation } from \"./storeUtils.js\";\n// Checks the document for errors and throws an exception if there is an error.\nexport function checkDocument(doc) {\n invariant(doc && doc.kind === \"Document\", 77);\n var operations = doc.definitions\n .filter(function (d) { return d.kind !== \"FragmentDefinition\"; })\n .map(function (definition) {\n if (definition.kind !== \"OperationDefinition\") {\n throw newInvariantError(78, definition.kind);\n }\n return definition;\n });\n invariant(operations.length <= 1, 79, operations.length);\n return doc;\n}\nexport function getOperationDefinition(doc) {\n checkDocument(doc);\n return doc.definitions.filter(function (definition) {\n return definition.kind === \"OperationDefinition\";\n })[0];\n}\nexport function getOperationName(doc) {\n return (doc.definitions\n .filter(function (definition) {\n return definition.kind === \"OperationDefinition\" && !!definition.name;\n })\n .map(function (x) { return x.name.value; })[0] || null);\n}\n// Returns the FragmentDefinitions from a particular document as an array\nexport function getFragmentDefinitions(doc) {\n return doc.definitions.filter(function (definition) {\n return definition.kind === \"FragmentDefinition\";\n });\n}\nexport function getQueryDefinition(doc) {\n var queryDef = getOperationDefinition(doc);\n invariant(queryDef && queryDef.operation === \"query\", 80);\n return queryDef;\n}\nexport function getFragmentDefinition(doc) {\n invariant(doc.kind === \"Document\", 81);\n invariant(doc.definitions.length <= 1, 82);\n var fragmentDef = doc.definitions[0];\n invariant(fragmentDef.kind === \"FragmentDefinition\", 83);\n return fragmentDef;\n}\n/**\n * Returns the first operation definition found in this document.\n * If no operation definition is found, the first fragment definition will be returned.\n * If no definitions are found, an error will be thrown.\n */\nexport function getMainDefinition(queryDoc) {\n checkDocument(queryDoc);\n var fragmentDefinition;\n for (var _i = 0, _a = queryDoc.definitions; _i < _a.length; _i++) {\n var definition = _a[_i];\n if (definition.kind === \"OperationDefinition\") {\n var operation = definition.operation;\n if (operation === \"query\" ||\n operation === \"mutation\" ||\n operation === \"subscription\") {\n return definition;\n }\n }\n if (definition.kind === \"FragmentDefinition\" && !fragmentDefinition) {\n // we do this because we want to allow multiple fragment definitions\n // to precede an operation definition.\n fragmentDefinition = definition;\n }\n }\n if (fragmentDefinition) {\n return fragmentDefinition;\n }\n throw newInvariantError(84);\n}\nexport function getDefaultValues(definition) {\n var defaultValues = Object.create(null);\n var defs = definition && definition.variableDefinitions;\n if (defs && defs.length) {\n defs.forEach(function (def) {\n if (def.defaultValue) {\n valueToObjectRepresentation(defaultValues, def.variable.name, def.defaultValue);\n }\n });\n }\n return defaultValues;\n}\n//# sourceMappingURL=getFromAST.js.map","import { newInvariantError, invariant } from \"../../utilities/globals/index.js\";\nimport { Observable } from \"../../utilities/index.js\";\nimport { validateOperation, createOperation, transformOperation, } from \"../utils/index.js\";\nfunction passthrough(op, forward) {\n return (forward ? forward(op) : Observable.of());\n}\nfunction toLink(handler) {\n return typeof handler === \"function\" ? new ApolloLink(handler) : handler;\n}\nfunction isTerminating(link) {\n return link.request.length <= 1;\n}\nvar ApolloLink = /** @class */ (function () {\n function ApolloLink(request) {\n if (request)\n this.request = request;\n }\n ApolloLink.empty = function () {\n return new ApolloLink(function () { return Observable.of(); });\n };\n ApolloLink.from = function (links) {\n if (links.length === 0)\n return ApolloLink.empty();\n return links.map(toLink).reduce(function (x, y) { return x.concat(y); });\n };\n ApolloLink.split = function (test, left, right) {\n var leftLink = toLink(left);\n var rightLink = toLink(right || new ApolloLink(passthrough));\n var ret;\n if (isTerminating(leftLink) && isTerminating(rightLink)) {\n ret = new ApolloLink(function (operation) {\n return test(operation) ?\n leftLink.request(operation) || Observable.of()\n : rightLink.request(operation) || Observable.of();\n });\n }\n else {\n ret = new ApolloLink(function (operation, forward) {\n return test(operation) ?\n leftLink.request(operation, forward) || Observable.of()\n : rightLink.request(operation, forward) || Observable.of();\n });\n }\n return Object.assign(ret, { left: leftLink, right: rightLink });\n };\n ApolloLink.execute = function (link, operation) {\n return (link.request(createOperation(operation.context, transformOperation(validateOperation(operation)))) || Observable.of());\n };\n ApolloLink.concat = function (first, second) {\n var firstLink = toLink(first);\n if (isTerminating(firstLink)) {\n globalThis.__DEV__ !== false && invariant.warn(36, firstLink);\n return firstLink;\n }\n var nextLink = toLink(second);\n var ret;\n if (isTerminating(nextLink)) {\n ret = new ApolloLink(function (operation) {\n return firstLink.request(operation, function (op) { return nextLink.request(op) || Observable.of(); }) || Observable.of();\n });\n }\n else {\n ret = new ApolloLink(function (operation, forward) {\n return (firstLink.request(operation, function (op) {\n return nextLink.request(op, forward) || Observable.of();\n }) || Observable.of());\n });\n }\n return Object.assign(ret, { left: firstLink, right: nextLink });\n };\n ApolloLink.prototype.split = function (test, left, right) {\n return this.concat(ApolloLink.split(test, left, right || new ApolloLink(passthrough)));\n };\n ApolloLink.prototype.concat = function (next) {\n return ApolloLink.concat(this, next);\n };\n ApolloLink.prototype.request = function (operation, forward) {\n throw newInvariantError(37);\n };\n ApolloLink.prototype.onError = function (error, observer) {\n if (observer && observer.error) {\n observer.error(error);\n // Returning false indicates that observer.error does not need to be\n // called again, since it was already called (on the previous line).\n // Calling observer.error again would not cause any real problems,\n // since only the first call matters, but custom onError functions\n // might have other reasons for wanting to prevent the default\n // behavior by returning false.\n return false;\n }\n // Throw errors will be passed to observer.error.\n throw error;\n };\n ApolloLink.prototype.setOnError = function (fn) {\n this.onError = fn;\n return this;\n };\n return ApolloLink;\n}());\nexport { ApolloLink };\n//# sourceMappingURL=ApolloLink.js.map","import { __assign } from \"tslib\";\nexport function createOperation(starting, operation) {\n var context = __assign({}, starting);\n var setContext = function (next) {\n if (typeof next === \"function\") {\n context = __assign(__assign({}, context), next(context));\n }\n else {\n context = __assign(__assign({}, context), next);\n }\n };\n var getContext = function () { return (__assign({}, context)); };\n Object.defineProperty(operation, \"setContext\", {\n enumerable: false,\n value: setContext,\n });\n Object.defineProperty(operation, \"getContext\", {\n enumerable: false,\n value: getContext,\n });\n return operation;\n}\n//# sourceMappingURL=createOperation.js.map","import { getOperationName } from \"../../utilities/index.js\";\nexport function transformOperation(operation) {\n var transformedOperation = {\n variables: operation.variables || {},\n extensions: operation.extensions || {},\n operationName: operation.operationName,\n query: operation.query,\n };\n // Best guess at an operation name\n if (!transformedOperation.operationName) {\n transformedOperation.operationName =\n typeof transformedOperation.query !== \"string\" ?\n getOperationName(transformedOperation.query) || undefined\n : \"\";\n }\n return transformedOperation;\n}\n//# sourceMappingURL=transformOperation.js.map","import { newInvariantError } from \"../../utilities/globals/index.js\";\nexport function validateOperation(operation) {\n var OPERATION_FIELDS = [\n \"query\",\n \"operationName\",\n \"variables\",\n \"extensions\",\n \"context\",\n ];\n for (var _i = 0, _a = Object.keys(operation); _i < _a.length; _i++) {\n var key = _a[_i];\n if (OPERATION_FIELDS.indexOf(key) < 0) {\n throw newInvariantError(44, key);\n }\n }\n return operation;\n}\n//# sourceMappingURL=validateOperation.js.map","import { ApolloLink } from \"./ApolloLink.js\";\nexport var execute = ApolloLink.execute;\n//# sourceMappingURL=execute.js.map","import { devAssert } from '../jsutils/devAssert.mjs';\nimport { inspect } from '../jsutils/inspect.mjs';\nimport { isNode, QueryDocumentKeys } from './ast.mjs';\nimport { Kind } from './kinds.mjs';\n/**\n * A visitor is provided to visit, it contains the collection of\n * relevant functions to be called during the visitor's traversal.\n */\n\nexport const BREAK = Object.freeze({});\n/**\n * visit() will walk through an AST using a depth-first traversal, calling\n * the visitor's enter function at each node in the traversal, and calling the\n * leave function after visiting that node and all of its child nodes.\n *\n * By returning different values from the enter and leave functions, the\n * behavior of the visitor can be altered, including skipping over a sub-tree of\n * the AST (by returning false), editing the AST by returning a value or null\n * to remove the value, or to stop the whole traversal by returning BREAK.\n *\n * When using visit() to edit an AST, the original AST will not be modified, and\n * a new version of the AST with the changes applied will be returned from the\n * visit function.\n *\n * ```ts\n * const editedAST = visit(ast, {\n * enter(node, key, parent, path, ancestors) {\n * // @return\n * // undefined: no action\n * // false: skip visiting this node\n * // visitor.BREAK: stop visiting altogether\n * // null: delete this node\n * // any value: replace this node with the returned value\n * },\n * leave(node, key, parent, path, ancestors) {\n * // @return\n * // undefined: no action\n * // false: no action\n * // visitor.BREAK: stop visiting altogether\n * // null: delete this node\n * // any value: replace this node with the returned value\n * }\n * });\n * ```\n *\n * Alternatively to providing enter() and leave() functions, a visitor can\n * instead provide functions named the same as the kinds of AST nodes, or\n * enter/leave visitors at a named key, leading to three permutations of the\n * visitor API:\n *\n * 1) Named visitors triggered when entering a node of a specific kind.\n *\n * ```ts\n * visit(ast, {\n * Kind(node) {\n * // enter the \"Kind\" node\n * }\n * })\n * ```\n *\n * 2) Named visitors that trigger upon entering and leaving a node of a specific kind.\n *\n * ```ts\n * visit(ast, {\n * Kind: {\n * enter(node) {\n * // enter the \"Kind\" node\n * }\n * leave(node) {\n * // leave the \"Kind\" node\n * }\n * }\n * })\n * ```\n *\n * 3) Generic visitors that trigger upon entering and leaving any node.\n *\n * ```ts\n * visit(ast, {\n * enter(node) {\n * // enter any node\n * },\n * leave(node) {\n * // leave any node\n * }\n * })\n * ```\n */\n\nexport function visit(root, visitor, visitorKeys = QueryDocumentKeys) {\n const enterLeaveMap = new Map();\n\n for (const kind of Object.values(Kind)) {\n enterLeaveMap.set(kind, getEnterLeaveForKind(visitor, kind));\n }\n /* eslint-disable no-undef-init */\n\n let stack = undefined;\n let inArray = Array.isArray(root);\n let keys = [root];\n let index = -1;\n let edits = [];\n let node = root;\n let key = undefined;\n let parent = undefined;\n const path = [];\n const ancestors = [];\n /* eslint-enable no-undef-init */\n\n do {\n index++;\n const isLeaving = index === keys.length;\n const isEdited = isLeaving && edits.length !== 0;\n\n if (isLeaving) {\n key = ancestors.length === 0 ? undefined : path[path.length - 1];\n node = parent;\n parent = ancestors.pop();\n\n if (isEdited) {\n if (inArray) {\n node = node.slice();\n let editOffset = 0;\n\n for (const [editKey, editValue] of edits) {\n const arrayKey = editKey - editOffset;\n\n if (editValue === null) {\n node.splice(arrayKey, 1);\n editOffset++;\n } else {\n node[arrayKey] = editValue;\n }\n }\n } else {\n node = Object.defineProperties(\n {},\n Object.getOwnPropertyDescriptors(node),\n );\n\n for (const [editKey, editValue] of edits) {\n node[editKey] = editValue;\n }\n }\n }\n\n index = stack.index;\n keys = stack.keys;\n edits = stack.edits;\n inArray = stack.inArray;\n stack = stack.prev;\n } else if (parent) {\n key = inArray ? index : keys[index];\n node = parent[key];\n\n if (node === null || node === undefined) {\n continue;\n }\n\n path.push(key);\n }\n\n let result;\n\n if (!Array.isArray(node)) {\n var _enterLeaveMap$get, _enterLeaveMap$get2;\n\n isNode(node) || devAssert(false, `Invalid AST Node: ${inspect(node)}.`);\n const visitFn = isLeaving\n ? (_enterLeaveMap$get = enterLeaveMap.get(node.kind)) === null ||\n _enterLeaveMap$get === void 0\n ? void 0\n : _enterLeaveMap$get.leave\n : (_enterLeaveMap$get2 = enterLeaveMap.get(node.kind)) === null ||\n _enterLeaveMap$get2 === void 0\n ? void 0\n : _enterLeaveMap$get2.enter;\n result =\n visitFn === null || visitFn === void 0\n ? void 0\n : visitFn.call(visitor, node, key, parent, path, ancestors);\n\n if (result === BREAK) {\n break;\n }\n\n if (result === false) {\n if (!isLeaving) {\n path.pop();\n continue;\n }\n } else if (result !== undefined) {\n edits.push([key, result]);\n\n if (!isLeaving) {\n if (isNode(result)) {\n node = result;\n } else {\n path.pop();\n continue;\n }\n }\n }\n }\n\n if (result === undefined && isEdited) {\n edits.push([key, node]);\n }\n\n if (isLeaving) {\n path.pop();\n } else {\n var _node$kind;\n\n stack = {\n inArray,\n index,\n keys,\n edits,\n prev: stack,\n };\n inArray = Array.isArray(node);\n keys = inArray\n ? node\n : (_node$kind = visitorKeys[node.kind]) !== null &&\n _node$kind !== void 0\n ? _node$kind\n : [];\n index = -1;\n edits = [];\n\n if (parent) {\n ancestors.push(parent);\n }\n\n parent = node;\n }\n } while (stack !== undefined);\n\n if (edits.length !== 0) {\n // New root\n return edits[edits.length - 1][1];\n }\n\n return root;\n}\n/**\n * Creates a new visitor instance which delegates to many visitors to run in\n * parallel. Each visitor will be visited for each node before moving on.\n *\n * If a prior visitor edits a node, no following visitors will see that node.\n */\n\nexport function visitInParallel(visitors) {\n const skipping = new Array(visitors.length).fill(null);\n const mergedVisitor = Object.create(null);\n\n for (const kind of Object.values(Kind)) {\n let hasVisitor = false;\n const enterList = new Array(visitors.length).fill(undefined);\n const leaveList = new Array(visitors.length).fill(undefined);\n\n for (let i = 0; i < visitors.length; ++i) {\n const { enter, leave } = getEnterLeaveForKind(visitors[i], kind);\n hasVisitor || (hasVisitor = enter != null || leave != null);\n enterList[i] = enter;\n leaveList[i] = leave;\n }\n\n if (!hasVisitor) {\n continue;\n }\n\n const mergedEnterLeave = {\n enter(...args) {\n const node = args[0];\n\n for (let i = 0; i < visitors.length; i++) {\n if (skipping[i] === null) {\n var _enterList$i;\n\n const result =\n (_enterList$i = enterList[i]) === null || _enterList$i === void 0\n ? void 0\n : _enterList$i.apply(visitors[i], args);\n\n if (result === false) {\n skipping[i] = node;\n } else if (result === BREAK) {\n skipping[i] = BREAK;\n } else if (result !== undefined) {\n return result;\n }\n }\n }\n },\n\n leave(...args) {\n const node = args[0];\n\n for (let i = 0; i < visitors.length; i++) {\n if (skipping[i] === null) {\n var _leaveList$i;\n\n const result =\n (_leaveList$i = leaveList[i]) === null || _leaveList$i === void 0\n ? void 0\n : _leaveList$i.apply(visitors[i], args);\n\n if (result === BREAK) {\n skipping[i] = BREAK;\n } else if (result !== undefined && result !== false) {\n return result;\n }\n } else if (skipping[i] === node) {\n skipping[i] = null;\n }\n }\n },\n };\n mergedVisitor[kind] = mergedEnterLeave;\n }\n\n return mergedVisitor;\n}\n/**\n * Given a visitor instance and a node kind, return EnterLeaveVisitor for that kind.\n */\n\nexport function getEnterLeaveForKind(visitor, kind) {\n const kindVisitor = visitor[kind];\n\n if (typeof kindVisitor === 'object') {\n // { Kind: { enter() {}, leave() {} } }\n return kindVisitor;\n } else if (typeof kindVisitor === 'function') {\n // { Kind() {} }\n return {\n enter: kindVisitor,\n leave: undefined,\n };\n } // { enter() {}, leave() {} }\n\n return {\n enter: visitor.enter,\n leave: visitor.leave,\n };\n}\n/**\n * Given a visitor instance, if it is leaving or not, and a node kind, return\n * the function the visitor runtime should call.\n *\n * @deprecated Please use `getEnterLeaveForKind` instead. Will be removed in v17\n */\n\n/* c8 ignore next 8 */\n\nexport function getVisitFn(visitor, kind, isLeaving) {\n const { enter, leave } = getEnterLeaveForKind(visitor, kind);\n return isLeaving ? leave : enter;\n}\n","import { invariant } from \"../globals/index.js\";\nimport { visit, BREAK } from \"graphql\";\nexport function shouldInclude(_a, variables) {\n var directives = _a.directives;\n if (!directives || !directives.length) {\n return true;\n }\n return getInclusionDirectives(directives).every(function (_a) {\n var directive = _a.directive, ifArgument = _a.ifArgument;\n var evaledValue = false;\n if (ifArgument.value.kind === \"Variable\") {\n evaledValue =\n variables && variables[ifArgument.value.name.value];\n invariant(evaledValue !== void 0, 70, directive.name.value);\n }\n else {\n evaledValue = ifArgument.value.value;\n }\n return directive.name.value === \"skip\" ? !evaledValue : evaledValue;\n });\n}\nexport function getDirectiveNames(root) {\n var names = [];\n visit(root, {\n Directive: function (node) {\n names.push(node.name.value);\n },\n });\n return names;\n}\nexport var hasAnyDirectives = function (names, root) {\n return hasDirectives(names, root, false);\n};\nexport var hasAllDirectives = function (names, root) {\n return hasDirectives(names, root, true);\n};\nexport function hasDirectives(names, root, all) {\n var nameSet = new Set(names);\n var uniqueCount = nameSet.size;\n visit(root, {\n Directive: function (node) {\n if (nameSet.delete(node.name.value) && (!all || !nameSet.size)) {\n return BREAK;\n }\n },\n });\n // If we found all the names, nameSet will be empty. If we only care about\n // finding some of them, the < condition is sufficient.\n return all ? !nameSet.size : nameSet.size < uniqueCount;\n}\nexport function hasClientExports(document) {\n return document && hasDirectives([\"client\", \"export\"], document, true);\n}\nfunction isInclusionDirective(_a) {\n var value = _a.name.value;\n return value === \"skip\" || value === \"include\";\n}\nexport function getInclusionDirectives(directives) {\n var result = [];\n if (directives && directives.length) {\n directives.forEach(function (directive) {\n if (!isInclusionDirective(directive))\n return;\n var directiveArguments = directive.arguments;\n var directiveName = directive.name.value;\n invariant(directiveArguments && directiveArguments.length === 1, 71, directiveName);\n var ifArgument = directiveArguments[0];\n invariant(ifArgument.name && ifArgument.name.value === \"if\", 72, directiveName);\n var ifValue = ifArgument.value;\n // means it has to be a variable value if this is a valid @skip or @include directive\n invariant(ifValue &&\n (ifValue.kind === \"Variable\" || ifValue.kind === \"BooleanValue\"), 73, directiveName);\n result.push({ directive: directive, ifArgument: ifArgument });\n });\n }\n return result;\n}\n//# sourceMappingURL=directives.js.map","import { newInvariantError } from \"../../utilities/globals/index.js\";\nexport var serializeFetchParameter = function (p, label) {\n var serialized;\n try {\n serialized = JSON.stringify(p);\n }\n catch (e) {\n var parseError = newInvariantError(40, label, e.message);\n parseError.parseError = e;\n throw parseError;\n }\n return serialized;\n};\n//# sourceMappingURL=serializeFetchParameter.js.map","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/nodeStream.ts\n */\nimport { canUseAsyncIteratorSymbol } from \"../../../utilities/index.js\";\nexport default function nodeStreamIterator(stream) {\n var cleanup = null;\n var error = null;\n var done = false;\n var data = [];\n var waiting = [];\n function onData(chunk) {\n if (error)\n return;\n if (waiting.length) {\n var shiftedArr = waiting.shift();\n if (Array.isArray(shiftedArr) && shiftedArr[0]) {\n return shiftedArr[0]({ value: chunk, done: false });\n }\n }\n data.push(chunk);\n }\n function onError(err) {\n error = err;\n var all = waiting.slice();\n all.forEach(function (pair) {\n pair[1](err);\n });\n !cleanup || cleanup();\n }\n function onEnd() {\n done = true;\n var all = waiting.slice();\n all.forEach(function (pair) {\n pair[0]({ value: undefined, done: true });\n });\n !cleanup || cleanup();\n }\n cleanup = function () {\n cleanup = null;\n stream.removeListener(\"data\", onData);\n stream.removeListener(\"error\", onError);\n stream.removeListener(\"end\", onEnd);\n stream.removeListener(\"finish\", onEnd);\n stream.removeListener(\"close\", onEnd);\n };\n stream.on(\"data\", onData);\n stream.on(\"error\", onError);\n stream.on(\"end\", onEnd);\n stream.on(\"finish\", onEnd);\n stream.on(\"close\", onEnd);\n function getNext() {\n return new Promise(function (resolve, reject) {\n if (error)\n return reject(error);\n if (data.length)\n return resolve({ value: data.shift(), done: false });\n if (done)\n return resolve({ value: undefined, done: true });\n waiting.push([resolve, reject]);\n });\n }\n var iterator = {\n next: function () {\n return getNext();\n },\n };\n if (canUseAsyncIteratorSymbol) {\n iterator[Symbol.asyncIterator] = function () {\n return this;\n };\n }\n return iterator;\n}\n//# sourceMappingURL=nodeStream.js.map","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/reader.ts\n */\nimport { canUseAsyncIteratorSymbol } from \"../../../utilities/index.js\";\nexport default function readerIterator(reader) {\n var iterator = {\n next: function () {\n return reader.read();\n },\n };\n if (canUseAsyncIteratorSymbol) {\n iterator[Symbol.asyncIterator] = function () {\n return this;\n };\n }\n return iterator;\n}\n//# sourceMappingURL=reader.js.map","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/index.ts\n */\nimport { canUseAsyncIteratorSymbol } from \"../../utilities/index.js\";\nimport asyncIterator from \"./iterators/async.js\";\nimport nodeStreamIterator from \"./iterators/nodeStream.js\";\nimport promiseIterator from \"./iterators/promise.js\";\nimport readerIterator from \"./iterators/reader.js\";\nfunction isNodeResponse(value) {\n return !!value.body;\n}\nfunction isReadableStream(value) {\n return !!value.getReader;\n}\nfunction isAsyncIterableIterator(value) {\n return !!(canUseAsyncIteratorSymbol &&\n value[Symbol.asyncIterator]);\n}\nfunction isStreamableBlob(value) {\n return !!value.stream;\n}\nfunction isBlob(value) {\n return !!value.arrayBuffer;\n}\nfunction isNodeReadableStream(value) {\n return !!value.pipe;\n}\nexport function responseIterator(response) {\n var body = response;\n if (isNodeResponse(response))\n body = response.body;\n if (isAsyncIterableIterator(body))\n return asyncIterator(body);\n if (isReadableStream(body))\n return readerIterator(body.getReader());\n // this errors without casting to ReadableStream
\n // because Blob.stream() returns a NodeJS ReadableStream\n if (isStreamableBlob(body)) {\n return readerIterator(body.stream().getReader());\n }\n if (isBlob(body))\n return promiseIterator(body.arrayBuffer());\n if (isNodeReadableStream(body))\n return nodeStreamIterator(body);\n throw new Error(\"Unknown body type for responseIterator. Please pass a streamable response.\");\n}\n//# sourceMappingURL=responseIterator.js.map","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/async.ts\n */\nexport default function asyncIterator(source) {\n var _a;\n var iterator = source[Symbol.asyncIterator]();\n return _a = {\n next: function () {\n return iterator.next();\n }\n },\n _a[Symbol.asyncIterator] = function () {\n return this;\n },\n _a;\n}\n//# sourceMappingURL=async.js.map","/**\n * Original source:\n * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/promise.ts\n */\nimport { canUseAsyncIteratorSymbol } from \"../../../utilities/index.js\";\nexport default function promiseIterator(promise) {\n var resolved = false;\n var iterator = {\n next: function () {\n if (resolved)\n return Promise.resolve({\n value: undefined,\n done: true,\n });\n resolved = true;\n return new Promise(function (resolve, reject) {\n promise\n .then(function (value) {\n resolve({ value: value, done: false });\n })\n .catch(reject);\n });\n },\n };\n if (canUseAsyncIteratorSymbol) {\n iterator[Symbol.asyncIterator] = function () {\n return this;\n };\n }\n return iterator;\n}\n//# sourceMappingURL=promise.js.map","export var throwServerError = function (response, result, message) {\n var error = new Error(message);\n error.name = \"ServerError\";\n error.response = response;\n error.statusCode = response.status;\n error.result = result;\n throw error;\n};\n//# sourceMappingURL=throwServerError.js.map","import { __assign, __spreadArray } from \"tslib\";\nimport { isNonNullObject } from \"./objects.js\";\nvar hasOwnProperty = Object.prototype.hasOwnProperty;\nexport function mergeDeep() {\n var sources = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n sources[_i] = arguments[_i];\n }\n return mergeDeepArray(sources);\n}\n// In almost any situation where you could succeed in getting the\n// TypeScript compiler to infer a tuple type for the sources array, you\n// could just use mergeDeep instead of mergeDeepArray, so instead of\n// trying to convert T[] to an intersection type we just infer the array\n// element type, which works perfectly when the sources array has a\n// consistent element type.\nexport function mergeDeepArray(sources) {\n var target = sources[0] || {};\n var count = sources.length;\n if (count > 1) {\n var merger = new DeepMerger();\n for (var i = 1; i < count; ++i) {\n target = merger.merge(target, sources[i]);\n }\n }\n return target;\n}\nvar defaultReconciler = function (target, source, property) {\n return this.merge(target[property], source[property]);\n};\nvar DeepMerger = /** @class */ (function () {\n function DeepMerger(reconciler) {\n if (reconciler === void 0) { reconciler = defaultReconciler; }\n this.reconciler = reconciler;\n this.isObject = isNonNullObject;\n this.pastCopies = new Set();\n }\n DeepMerger.prototype.merge = function (target, source) {\n var _this = this;\n var context = [];\n for (var _i = 2; _i < arguments.length; _i++) {\n context[_i - 2] = arguments[_i];\n }\n if (isNonNullObject(source) && isNonNullObject(target)) {\n Object.keys(source).forEach(function (sourceKey) {\n if (hasOwnProperty.call(target, sourceKey)) {\n var targetValue = target[sourceKey];\n if (source[sourceKey] !== targetValue) {\n var result = _this.reconciler.apply(_this, __spreadArray([target,\n source,\n sourceKey], context, false));\n // A well-implemented reconciler may return targetValue to indicate\n // the merge changed nothing about the structure of the target.\n if (result !== targetValue) {\n target = _this.shallowCopyForMerge(target);\n target[sourceKey] = result;\n }\n }\n }\n else {\n // If there is no collision, the target can safely share memory with\n // the source, and the recursion can terminate here.\n target = _this.shallowCopyForMerge(target);\n target[sourceKey] = source[sourceKey];\n }\n });\n return target;\n }\n // If source (or target) is not an object, let source replace target.\n return source;\n };\n DeepMerger.prototype.shallowCopyForMerge = function (value) {\n if (isNonNullObject(value)) {\n if (!this.pastCopies.has(value)) {\n if (Array.isArray(value)) {\n value = value.slice(0);\n }\n else {\n value = __assign({ __proto__: Object.getPrototypeOf(value) }, value);\n }\n this.pastCopies.add(value);\n }\n }\n return value;\n };\n return DeepMerger;\n}());\nexport { DeepMerger };\n//# sourceMappingURL=mergeDeep.js.map","import { isNonNullObject } from \"./objects.js\";\nimport { isNonEmptyArray } from \"./arrays.js\";\nimport { DeepMerger } from \"./mergeDeep.js\";\nexport function isExecutionPatchIncrementalResult(value) {\n return \"incremental\" in value;\n}\nexport function isExecutionPatchInitialResult(value) {\n return \"hasNext\" in value && \"data\" in value;\n}\nexport function isExecutionPatchResult(value) {\n return (isExecutionPatchIncrementalResult(value) ||\n isExecutionPatchInitialResult(value));\n}\n// This function detects an Apollo payload result before it is transformed\n// into a FetchResult via HttpLink; it cannot detect an ApolloPayloadResult\n// once it leaves the link chain.\nexport function isApolloPayloadResult(value) {\n return isNonNullObject(value) && \"payload\" in value;\n}\nexport function mergeIncrementalData(prevResult, result) {\n var mergedData = prevResult;\n var merger = new DeepMerger();\n if (isExecutionPatchIncrementalResult(result) &&\n isNonEmptyArray(result.incremental)) {\n result.incremental.forEach(function (_a) {\n var data = _a.data, path = _a.path;\n for (var i = path.length - 1; i >= 0; --i) {\n var key = path[i];\n var isNumericKey = !isNaN(+key);\n var parent_1 = isNumericKey ? [] : {};\n parent_1[key] = data;\n data = parent_1;\n }\n mergedData = merger.merge(mergedData, data);\n });\n }\n return mergedData;\n}\n//# sourceMappingURL=incrementalResult.js.map","import { __assign, __awaiter, __generator } from \"tslib\";\nimport { responseIterator } from \"./responseIterator.js\";\nimport { throwServerError } from \"../utils/index.js\";\nimport { PROTOCOL_ERRORS_SYMBOL } from \"../../errors/index.js\";\nimport { isApolloPayloadResult } from \"../../utilities/common/incrementalResult.js\";\nvar hasOwnProperty = Object.prototype.hasOwnProperty;\nexport function readMultipartBody(response, nextValue) {\n return __awaiter(this, void 0, void 0, function () {\n var decoder, contentType, delimiter, boundaryVal, boundary, buffer, iterator, running, _a, value, done, chunk, searchFrom, bi, message, i, headers, contentType_1, body, result, next;\n var _b, _c;\n var _d;\n return __generator(this, function (_e) {\n switch (_e.label) {\n case 0:\n if (TextDecoder === undefined) {\n throw new Error(\"TextDecoder must be defined in the environment: please import a polyfill.\");\n }\n decoder = new TextDecoder(\"utf-8\");\n contentType = (_d = response.headers) === null || _d === void 0 ? void 0 : _d.get(\"content-type\");\n delimiter = \"boundary=\";\n boundaryVal = (contentType === null || contentType === void 0 ? void 0 : contentType.includes(delimiter)) ?\n contentType === null || contentType === void 0 ? void 0 : contentType.substring((contentType === null || contentType === void 0 ? void 0 : contentType.indexOf(delimiter)) + delimiter.length).replace(/['\"]/g, \"\").replace(/\\;(.*)/gm, \"\").trim()\n : \"-\";\n boundary = \"\\r\\n--\".concat(boundaryVal);\n buffer = \"\";\n iterator = responseIterator(response);\n running = true;\n _e.label = 1;\n case 1:\n if (!running) return [3 /*break*/, 3];\n return [4 /*yield*/, iterator.next()];\n case 2:\n _a = _e.sent(), value = _a.value, done = _a.done;\n chunk = typeof value === \"string\" ? value : decoder.decode(value);\n searchFrom = buffer.length - boundary.length + 1;\n running = !done;\n buffer += chunk;\n bi = buffer.indexOf(boundary, searchFrom);\n while (bi > -1) {\n message = void 0;\n _b = [\n buffer.slice(0, bi),\n buffer.slice(bi + boundary.length),\n ], message = _b[0], buffer = _b[1];\n i = message.indexOf(\"\\r\\n\\r\\n\");\n headers = parseHeaders(message.slice(0, i));\n contentType_1 = headers[\"content-type\"];\n if (contentType_1 &&\n contentType_1.toLowerCase().indexOf(\"application/json\") === -1) {\n throw new Error(\"Unsupported patch content type: application/json is required.\");\n }\n body = message.slice(i);\n if (body) {\n result = parseJsonBody(response, body);\n if (Object.keys(result).length > 1 ||\n \"data\" in result ||\n \"incremental\" in result ||\n \"errors\" in result ||\n \"payload\" in result) {\n if (isApolloPayloadResult(result)) {\n next = {};\n if (\"payload\" in result) {\n if (Object.keys(result).length === 1 && result.payload === null) {\n return [2 /*return*/];\n }\n next = __assign({}, result.payload);\n }\n if (\"errors\" in result) {\n next = __assign(__assign({}, next), { extensions: __assign(__assign({}, (\"extensions\" in next ? next.extensions : null)), (_c = {}, _c[PROTOCOL_ERRORS_SYMBOL] = result.errors, _c)) });\n }\n nextValue(next);\n }\n else {\n // for the last chunk with only `hasNext: false`\n // we don't need to call observer.next as there is no data/errors\n nextValue(result);\n }\n }\n else if (\n // If the chunk contains only a \"hasNext: false\", we can call\n // observer.complete() immediately.\n Object.keys(result).length === 1 &&\n \"hasNext\" in result &&\n !result.hasNext) {\n return [2 /*return*/];\n }\n }\n bi = buffer.indexOf(boundary);\n }\n return [3 /*break*/, 1];\n case 3: return [2 /*return*/];\n }\n });\n });\n}\nexport function parseHeaders(headerText) {\n var headersInit = {};\n headerText.split(\"\\n\").forEach(function (line) {\n var i = line.indexOf(\":\");\n if (i > -1) {\n // normalize headers to lowercase\n var name_1 = line.slice(0, i).trim().toLowerCase();\n var value = line.slice(i + 1).trim();\n headersInit[name_1] = value;\n }\n });\n return headersInit;\n}\nexport function parseJsonBody(response, bodyText) {\n if (response.status >= 300) {\n // Network error\n var getResult = function () {\n try {\n return JSON.parse(bodyText);\n }\n catch (err) {\n return bodyText;\n }\n };\n throwServerError(response, getResult(), \"Response not successful: Received status code \".concat(response.status));\n }\n try {\n return JSON.parse(bodyText);\n }\n catch (err) {\n var parseError = err;\n parseError.name = \"ServerParseError\";\n parseError.response = response;\n parseError.statusCode = response.status;\n parseError.bodyText = bodyText;\n throw parseError;\n }\n}\nexport function handleError(err, observer) {\n // if it is a network error, BUT there is graphql result info fire\n // the next observer before calling error this gives apollo-client\n // (and react-apollo) the `graphqlErrors` and `networkErrors` to\n // pass to UI this should only happen if we *also* have data as\n // part of the response key per the spec\n if (err.result && err.result.errors && err.result.data) {\n // if we don't call next, the UI can only show networkError\n // because AC didn't get any graphqlErrors this is graphql\n // execution result info (i.e errors and possibly data) this is\n // because there is no formal spec how errors should translate to\n // http status codes. So an auth error (401) could have both data\n // from a public field, errors from a private field, and a status\n // of 401\n // {\n // user { // this will have errors\n // firstName\n // }\n // products { // this is public so will have data\n // cost\n // }\n // }\n //\n // the result of above *could* look like this:\n // {\n // data: { products: [{ cost: \"$10\" }] },\n // errors: [{\n // message: 'your session has timed out',\n // path: []\n // }]\n // }\n // status code of above would be a 401\n // in the UI you want to show data where you can, errors as data where you can\n // and use correct http status codes\n observer.next(err.result);\n }\n observer.error(err);\n}\nexport function parseAndCheckHttpResponse(operations) {\n return function (response) {\n return response\n .text()\n .then(function (bodyText) { return parseJsonBody(response, bodyText); })\n .then(function (result) {\n if (!Array.isArray(result) &&\n !hasOwnProperty.call(result, \"data\") &&\n !hasOwnProperty.call(result, \"errors\")) {\n // Data error\n throwServerError(response, result, \"Server response was missing for query '\".concat(Array.isArray(operations) ?\n operations.map(function (op) { return op.operationName; })\n : operations.operationName, \"'.\"));\n }\n return result;\n });\n };\n}\n//# sourceMappingURL=parseAndCheckHttpResponse.js.map","import { newInvariantError } from \"../../utilities/globals/index.js\";\nexport var checkFetcher = function (fetcher) {\n if (!fetcher && typeof fetch === \"undefined\") {\n throw newInvariantError(38);\n }\n};\n//# sourceMappingURL=checkFetcher.js.map","/**\n * Prints a string as a GraphQL StringValue literal. Replaces control characters\n * and excluded characters (\" U+0022 and \\\\ U+005C) with escape sequences.\n */\nexport function printString(str) {\n return `\"${str.replace(escapedRegExp, escapedReplacer)}\"`;\n} // eslint-disable-next-line no-control-regex\n\nconst escapedRegExp = /[\\x00-\\x1f\\x22\\x5c\\x7f-\\x9f]/g;\n\nfunction escapedReplacer(str) {\n return escapeSequences[str.charCodeAt(0)];\n} // prettier-ignore\n\nconst escapeSequences = [\n '\\\\u0000',\n '\\\\u0001',\n '\\\\u0002',\n '\\\\u0003',\n '\\\\u0004',\n '\\\\u0005',\n '\\\\u0006',\n '\\\\u0007',\n '\\\\b',\n '\\\\t',\n '\\\\n',\n '\\\\u000B',\n '\\\\f',\n '\\\\r',\n '\\\\u000E',\n '\\\\u000F',\n '\\\\u0010',\n '\\\\u0011',\n '\\\\u0012',\n '\\\\u0013',\n '\\\\u0014',\n '\\\\u0015',\n '\\\\u0016',\n '\\\\u0017',\n '\\\\u0018',\n '\\\\u0019',\n '\\\\u001A',\n '\\\\u001B',\n '\\\\u001C',\n '\\\\u001D',\n '\\\\u001E',\n '\\\\u001F',\n '',\n '',\n '\\\\\"',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '', // 2F\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '', // 3F\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '', // 4F\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '\\\\\\\\',\n '',\n '',\n '', // 5F\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '', // 6F\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '',\n '\\\\u007F',\n '\\\\u0080',\n '\\\\u0081',\n '\\\\u0082',\n '\\\\u0083',\n '\\\\u0084',\n '\\\\u0085',\n '\\\\u0086',\n '\\\\u0087',\n '\\\\u0088',\n '\\\\u0089',\n '\\\\u008A',\n '\\\\u008B',\n '\\\\u008C',\n '\\\\u008D',\n '\\\\u008E',\n '\\\\u008F',\n '\\\\u0090',\n '\\\\u0091',\n '\\\\u0092',\n '\\\\u0093',\n '\\\\u0094',\n '\\\\u0095',\n '\\\\u0096',\n '\\\\u0097',\n '\\\\u0098',\n '\\\\u0099',\n '\\\\u009A',\n '\\\\u009B',\n '\\\\u009C',\n '\\\\u009D',\n '\\\\u009E',\n '\\\\u009F',\n];\n","import { printBlockString } from './blockString.mjs';\nimport { printString } from './printString.mjs';\nimport { visit } from './visitor.mjs';\n/**\n * Converts an AST into a string, using one set of reasonable\n * formatting rules.\n */\n\nexport function print(ast) {\n return visit(ast, printDocASTReducer);\n}\nconst MAX_LINE_LENGTH = 80;\nconst printDocASTReducer = {\n Name: {\n leave: (node) => node.value,\n },\n Variable: {\n leave: (node) => '$' + node.name,\n },\n // Document\n Document: {\n leave: (node) => join(node.definitions, '\\n\\n'),\n },\n OperationDefinition: {\n leave(node) {\n const varDefs = wrap('(', join(node.variableDefinitions, ', '), ')');\n const prefix = join(\n [\n node.operation,\n join([node.name, varDefs]),\n join(node.directives, ' '),\n ],\n ' ',\n ); // Anonymous queries with no directives or variable definitions can use\n // the query short form.\n\n return (prefix === 'query' ? '' : prefix + ' ') + node.selectionSet;\n },\n },\n VariableDefinition: {\n leave: ({ variable, type, defaultValue, directives }) =>\n variable +\n ': ' +\n type +\n wrap(' = ', defaultValue) +\n wrap(' ', join(directives, ' ')),\n },\n SelectionSet: {\n leave: ({ selections }) => block(selections),\n },\n Field: {\n leave({ alias, name, arguments: args, directives, selectionSet }) {\n const prefix = wrap('', alias, ': ') + name;\n let argsLine = prefix + wrap('(', join(args, ', '), ')');\n\n if (argsLine.length > MAX_LINE_LENGTH) {\n argsLine = prefix + wrap('(\\n', indent(join(args, '\\n')), '\\n)');\n }\n\n return join([argsLine, join(directives, ' '), selectionSet], ' ');\n },\n },\n Argument: {\n leave: ({ name, value }) => name + ': ' + value,\n },\n // Fragments\n FragmentSpread: {\n leave: ({ name, directives }) =>\n '...' + name + wrap(' ', join(directives, ' ')),\n },\n InlineFragment: {\n leave: ({ typeCondition, directives, selectionSet }) =>\n join(\n [\n '...',\n wrap('on ', typeCondition),\n join(directives, ' '),\n selectionSet,\n ],\n ' ',\n ),\n },\n FragmentDefinition: {\n leave: (\n { name, typeCondition, variableDefinitions, directives, selectionSet }, // Note: fragment variable definitions are experimental and may be changed\n ) =>\n // or removed in the future.\n `fragment ${name}${wrap('(', join(variableDefinitions, ', '), ')')} ` +\n `on ${typeCondition} ${wrap('', join(directives, ' '), ' ')}` +\n selectionSet,\n },\n // Value\n IntValue: {\n leave: ({ value }) => value,\n },\n FloatValue: {\n leave: ({ value }) => value,\n },\n StringValue: {\n leave: ({ value, block: isBlockString }) =>\n isBlockString ? printBlockString(value) : printString(value),\n },\n BooleanValue: {\n leave: ({ value }) => (value ? 'true' : 'false'),\n },\n NullValue: {\n leave: () => 'null',\n },\n EnumValue: {\n leave: ({ value }) => value,\n },\n ListValue: {\n leave: ({ values }) => '[' + join(values, ', ') + ']',\n },\n ObjectValue: {\n leave: ({ fields }) => '{' + join(fields, ', ') + '}',\n },\n ObjectField: {\n leave: ({ name, value }) => name + ': ' + value,\n },\n // Directive\n Directive: {\n leave: ({ name, arguments: args }) =>\n '@' + name + wrap('(', join(args, ', '), ')'),\n },\n // Type\n NamedType: {\n leave: ({ name }) => name,\n },\n ListType: {\n leave: ({ type }) => '[' + type + ']',\n },\n NonNullType: {\n leave: ({ type }) => type + '!',\n },\n // Type System Definitions\n SchemaDefinition: {\n leave: ({ description, directives, operationTypes }) =>\n wrap('', description, '\\n') +\n join(['schema', join(directives, ' '), block(operationTypes)], ' '),\n },\n OperationTypeDefinition: {\n leave: ({ operation, type }) => operation + ': ' + type,\n },\n ScalarTypeDefinition: {\n leave: ({ description, name, directives }) =>\n wrap('', description, '\\n') +\n join(['scalar', name, join(directives, ' ')], ' '),\n },\n ObjectTypeDefinition: {\n leave: ({ description, name, interfaces, directives, fields }) =>\n wrap('', description, '\\n') +\n join(\n [\n 'type',\n name,\n wrap('implements ', join(interfaces, ' & ')),\n join(directives, ' '),\n block(fields),\n ],\n ' ',\n ),\n },\n FieldDefinition: {\n leave: ({ description, name, arguments: args, type, directives }) =>\n wrap('', description, '\\n') +\n name +\n (hasMultilineItems(args)\n ? wrap('(\\n', indent(join(args, '\\n')), '\\n)')\n : wrap('(', join(args, ', '), ')')) +\n ': ' +\n type +\n wrap(' ', join(directives, ' ')),\n },\n InputValueDefinition: {\n leave: ({ description, name, type, defaultValue, directives }) =>\n wrap('', description, '\\n') +\n join(\n [name + ': ' + type, wrap('= ', defaultValue), join(directives, ' ')],\n ' ',\n ),\n },\n InterfaceTypeDefinition: {\n leave: ({ description, name, interfaces, directives, fields }) =>\n wrap('', description, '\\n') +\n join(\n [\n 'interface',\n name,\n wrap('implements ', join(interfaces, ' & ')),\n join(directives, ' '),\n block(fields),\n ],\n ' ',\n ),\n },\n UnionTypeDefinition: {\n leave: ({ description, name, directives, types }) =>\n wrap('', description, '\\n') +\n join(\n ['union', name, join(directives, ' '), wrap('= ', join(types, ' | '))],\n ' ',\n ),\n },\n EnumTypeDefinition: {\n leave: ({ description, name, directives, values }) =>\n wrap('', description, '\\n') +\n join(['enum', name, join(directives, ' '), block(values)], ' '),\n },\n EnumValueDefinition: {\n leave: ({ description, name, directives }) =>\n wrap('', description, '\\n') + join([name, join(directives, ' ')], ' '),\n },\n InputObjectTypeDefinition: {\n leave: ({ description, name, directives, fields }) =>\n wrap('', description, '\\n') +\n join(['input', name, join(directives, ' '), block(fields)], ' '),\n },\n DirectiveDefinition: {\n leave: ({ description, name, arguments: args, repeatable, locations }) =>\n wrap('', description, '\\n') +\n 'directive @' +\n name +\n (hasMultilineItems(args)\n ? wrap('(\\n', indent(join(args, '\\n')), '\\n)')\n : wrap('(', join(args, ', '), ')')) +\n (repeatable ? ' repeatable' : '') +\n ' on ' +\n join(locations, ' | '),\n },\n SchemaExtension: {\n leave: ({ directives, operationTypes }) =>\n join(\n ['extend schema', join(directives, ' '), block(operationTypes)],\n ' ',\n ),\n },\n ScalarTypeExtension: {\n leave: ({ name, directives }) =>\n join(['extend scalar', name, join(directives, ' ')], ' '),\n },\n ObjectTypeExtension: {\n leave: ({ name, interfaces, directives, fields }) =>\n join(\n [\n 'extend type',\n name,\n wrap('implements ', join(interfaces, ' & ')),\n join(directives, ' '),\n block(fields),\n ],\n ' ',\n ),\n },\n InterfaceTypeExtension: {\n leave: ({ name, interfaces, directives, fields }) =>\n join(\n [\n 'extend interface',\n name,\n wrap('implements ', join(interfaces, ' & ')),\n join(directives, ' '),\n block(fields),\n ],\n ' ',\n ),\n },\n UnionTypeExtension: {\n leave: ({ name, directives, types }) =>\n join(\n [\n 'extend union',\n name,\n join(directives, ' '),\n wrap('= ', join(types, ' | ')),\n ],\n ' ',\n ),\n },\n EnumTypeExtension: {\n leave: ({ name, directives, values }) =>\n join(['extend enum', name, join(directives, ' '), block(values)], ' '),\n },\n InputObjectTypeExtension: {\n leave: ({ name, directives, fields }) =>\n join(['extend input', name, join(directives, ' '), block(fields)], ' '),\n },\n};\n/**\n * Given maybeArray, print an empty string if it is null or empty, otherwise\n * print all items together separated by separator if provided\n */\n\nfunction join(maybeArray, separator = '') {\n var _maybeArray$filter$jo;\n\n return (_maybeArray$filter$jo =\n maybeArray === null || maybeArray === void 0\n ? void 0\n : maybeArray.filter((x) => x).join(separator)) !== null &&\n _maybeArray$filter$jo !== void 0\n ? _maybeArray$filter$jo\n : '';\n}\n/**\n * Given array, print each item on its own line, wrapped in an indented `{ }` block.\n */\n\nfunction block(array) {\n return wrap('{\\n', indent(join(array, '\\n')), '\\n}');\n}\n/**\n * If maybeString is not null or empty, then wrap with start and end, otherwise print an empty string.\n */\n\nfunction wrap(start, maybeString, end = '') {\n return maybeString != null && maybeString !== ''\n ? start + maybeString + end\n : '';\n}\n\nfunction indent(str) {\n return wrap(' ', str.replace(/\\n/g, '\\n '));\n}\n\nfunction hasMultilineItems(maybeArray) {\n var _maybeArray$some;\n\n // FIXME: https://github.com/graphql/graphql-js/issues/2203\n\n /* c8 ignore next */\n return (_maybeArray$some =\n maybeArray === null || maybeArray === void 0\n ? void 0\n : maybeArray.some((str) => str.includes('\\n'))) !== null &&\n _maybeArray$some !== void 0\n ? _maybeArray$some\n : false;\n}\n","import { print as origPrint } from \"graphql\";\nimport { AutoCleanedWeakCache, cacheSizes, } from \"../caching/index.js\";\nimport { registerGlobalCache } from \"../caching/getMemoryInternals.js\";\nvar printCache;\nexport var print = Object.assign(function (ast) {\n var result = printCache.get(ast);\n if (!result) {\n result = origPrint(ast);\n printCache.set(ast, result);\n }\n return result;\n}, {\n reset: function () {\n printCache = new AutoCleanedWeakCache(cacheSizes.print || 2000 /* defaultCacheSizes.print */);\n },\n});\nprint.reset();\nif (globalThis.__DEV__ !== false) {\n registerGlobalCache(\"print\", function () { return (printCache ? printCache.size : 0); });\n}\n//# sourceMappingURL=print.js.map","import { __assign, __spreadArray } from \"tslib\";\nimport { print } from \"../../utilities/index.js\";\nvar defaultHttpOptions = {\n includeQuery: true,\n includeExtensions: false,\n preserveHeaderCase: false,\n};\nvar defaultHeaders = {\n // headers are case insensitive (https://stackoverflow.com/a/5259004)\n accept: \"*/*\",\n // The content-type header describes the type of the body of the request, and\n // so it typically only is sent with requests that actually have bodies. One\n // could imagine that Apollo Client would remove this header when constructing\n // a GET request (which has no body), but we historically have not done that.\n // This means that browsers will preflight all Apollo Client requests (even\n // GET requests). Apollo Server's CSRF prevention feature (introduced in\n // AS3.7) takes advantage of this fact and does not block requests with this\n // header. If you want to drop this header from GET requests, then you should\n // probably replace it with a `apollo-require-preflight` header, or servers\n // with CSRF prevention enabled might block your GET request. See\n // https://www.apollographql.com/docs/apollo-server/security/cors/#preventing-cross-site-request-forgery-csrf\n // for more details.\n \"content-type\": \"application/json\",\n};\nvar defaultOptions = {\n method: \"POST\",\n};\nexport var fallbackHttpConfig = {\n http: defaultHttpOptions,\n headers: defaultHeaders,\n options: defaultOptions,\n};\nexport var defaultPrinter = function (ast, printer) { return printer(ast); };\nexport function selectHttpOptionsAndBody(operation, fallbackConfig) {\n var configs = [];\n for (var _i = 2; _i < arguments.length; _i++) {\n configs[_i - 2] = arguments[_i];\n }\n configs.unshift(fallbackConfig);\n return selectHttpOptionsAndBodyInternal.apply(void 0, __spreadArray([operation,\n defaultPrinter], configs, false));\n}\nexport function selectHttpOptionsAndBodyInternal(operation, printer) {\n var configs = [];\n for (var _i = 2; _i < arguments.length; _i++) {\n configs[_i - 2] = arguments[_i];\n }\n var options = {};\n var http = {};\n configs.forEach(function (config) {\n options = __assign(__assign(__assign({}, options), config.options), { headers: __assign(__assign({}, options.headers), config.headers) });\n if (config.credentials) {\n options.credentials = config.credentials;\n }\n http = __assign(__assign({}, http), config.http);\n });\n if (options.headers) {\n options.headers = removeDuplicateHeaders(options.headers, http.preserveHeaderCase);\n }\n //The body depends on the http options\n var operationName = operation.operationName, extensions = operation.extensions, variables = operation.variables, query = operation.query;\n var body = { operationName: operationName, variables: variables };\n if (http.includeExtensions)\n body.extensions = extensions;\n // not sending the query (i.e persisted queries)\n if (http.includeQuery)\n body.query = printer(query, print);\n return {\n options: options,\n body: body,\n };\n}\n// Remove potential duplicate header names, preserving last (by insertion order).\n// This is done to prevent unintentionally duplicating a header instead of\n// overwriting it (See #8447 and #8449).\nfunction removeDuplicateHeaders(headers, preserveHeaderCase) {\n // If we're not preserving the case, just remove duplicates w/ normalization.\n if (!preserveHeaderCase) {\n var normalizedHeaders_1 = {};\n Object.keys(Object(headers)).forEach(function (name) {\n normalizedHeaders_1[name.toLowerCase()] = headers[name];\n });\n return normalizedHeaders_1;\n }\n // If we are preserving the case, remove duplicates w/ normalization,\n // preserving the original name.\n // This allows for non-http-spec-compliant servers that expect intentionally\n // capitalized header names (See #6741).\n var headerData = {};\n Object.keys(Object(headers)).forEach(function (name) {\n headerData[name.toLowerCase()] = {\n originalName: name,\n value: headers[name],\n };\n });\n var normalizedHeaders = {};\n Object.keys(headerData).forEach(function (name) {\n normalizedHeaders[headerData[name].originalName] = headerData[name].value;\n });\n return normalizedHeaders;\n}\n//# sourceMappingURL=selectHttpOptionsAndBody.js.map","import { Observable } from \"../../utilities/index.js\";\nexport function fromError(errorValue) {\n return new Observable(function (observer) {\n observer.error(errorValue);\n });\n}\n//# sourceMappingURL=fromError.js.map","import { __assign, __spreadArray } from \"tslib\";\nimport { invariant } from \"../globals/index.js\";\nimport { visit, Kind } from \"graphql\";\nimport { checkDocument, getOperationDefinition, getFragmentDefinition, getFragmentDefinitions, getMainDefinition, } from \"./getFromAST.js\";\nimport { isField } from \"./storeUtils.js\";\nimport { createFragmentMap } from \"./fragments.js\";\nimport { isArray, isNonEmptyArray } from \"../common/arrays.js\";\nvar TYPENAME_FIELD = {\n kind: Kind.FIELD,\n name: {\n kind: Kind.NAME,\n value: \"__typename\",\n },\n};\nfunction isEmpty(op, fragmentMap) {\n return (!op ||\n op.selectionSet.selections.every(function (selection) {\n return selection.kind === Kind.FRAGMENT_SPREAD &&\n isEmpty(fragmentMap[selection.name.value], fragmentMap);\n }));\n}\nfunction nullIfDocIsEmpty(doc) {\n return (isEmpty(getOperationDefinition(doc) || getFragmentDefinition(doc), createFragmentMap(getFragmentDefinitions(doc)))) ?\n null\n : doc;\n}\nfunction getDirectiveMatcher(configs) {\n var names = new Map();\n var tests = new Map();\n configs.forEach(function (directive) {\n if (directive) {\n if (directive.name) {\n names.set(directive.name, directive);\n }\n else if (directive.test) {\n tests.set(directive.test, directive);\n }\n }\n });\n return function (directive) {\n var config = names.get(directive.name.value);\n if (!config && tests.size) {\n tests.forEach(function (testConfig, test) {\n if (test(directive)) {\n config = testConfig;\n }\n });\n }\n return config;\n };\n}\nfunction makeInUseGetterFunction(defaultKey) {\n var map = new Map();\n return function inUseGetterFunction(key) {\n if (key === void 0) { key = defaultKey; }\n var inUse = map.get(key);\n if (!inUse) {\n map.set(key, (inUse = {\n // Variable and fragment spread names used directly within this\n // operation or fragment definition, as identified by key. These sets\n // will be populated during the first traversal of the document in\n // removeDirectivesFromDocument below.\n variables: new Set(),\n fragmentSpreads: new Set(),\n }));\n }\n return inUse;\n };\n}\nexport function removeDirectivesFromDocument(directives, doc) {\n checkDocument(doc);\n // Passing empty strings to makeInUseGetterFunction means we handle anonymous\n // operations as if their names were \"\". Anonymous fragment definitions are\n // not supposed to be possible, but the same default naming strategy seems\n // appropriate for that case as well.\n var getInUseByOperationName = makeInUseGetterFunction(\"\");\n var getInUseByFragmentName = makeInUseGetterFunction(\"\");\n var getInUse = function (ancestors) {\n for (var p = 0, ancestor = void 0; p < ancestors.length && (ancestor = ancestors[p]); ++p) {\n if (isArray(ancestor))\n continue;\n if (ancestor.kind === Kind.OPERATION_DEFINITION) {\n // If an operation is anonymous, we use the empty string as its key.\n return getInUseByOperationName(ancestor.name && ancestor.name.value);\n }\n if (ancestor.kind === Kind.FRAGMENT_DEFINITION) {\n return getInUseByFragmentName(ancestor.name.value);\n }\n }\n globalThis.__DEV__ !== false && invariant.error(86);\n return null;\n };\n var operationCount = 0;\n for (var i = doc.definitions.length - 1; i >= 0; --i) {\n if (doc.definitions[i].kind === Kind.OPERATION_DEFINITION) {\n ++operationCount;\n }\n }\n var directiveMatcher = getDirectiveMatcher(directives);\n var shouldRemoveField = function (nodeDirectives) {\n return isNonEmptyArray(nodeDirectives) &&\n nodeDirectives\n .map(directiveMatcher)\n .some(function (config) { return config && config.remove; });\n };\n var originalFragmentDefsByPath = new Map();\n // Any time the first traversal of the document below makes a change like\n // removing a fragment (by returning null), this variable should be set to\n // true. Once it becomes true, it should never be set to false again. If this\n // variable remains false throughout the traversal, then we can return the\n // original doc immediately without any modifications.\n var firstVisitMadeChanges = false;\n var fieldOrInlineFragmentVisitor = {\n enter: function (node) {\n if (shouldRemoveField(node.directives)) {\n firstVisitMadeChanges = true;\n return null;\n }\n },\n };\n var docWithoutDirectiveSubtrees = visit(doc, {\n // These two AST node types share the same implementation, defined above.\n Field: fieldOrInlineFragmentVisitor,\n InlineFragment: fieldOrInlineFragmentVisitor,\n VariableDefinition: {\n enter: function () {\n // VariableDefinition nodes do not count as variables in use, though\n // they do contain Variable nodes that might be visited below. To avoid\n // counting variable declarations as usages, we skip visiting the\n // contents of this VariableDefinition node by returning false.\n return false;\n },\n },\n Variable: {\n enter: function (node, _key, _parent, _path, ancestors) {\n var inUse = getInUse(ancestors);\n if (inUse) {\n inUse.variables.add(node.name.value);\n }\n },\n },\n FragmentSpread: {\n enter: function (node, _key, _parent, _path, ancestors) {\n if (shouldRemoveField(node.directives)) {\n firstVisitMadeChanges = true;\n return null;\n }\n var inUse = getInUse(ancestors);\n if (inUse) {\n inUse.fragmentSpreads.add(node.name.value);\n }\n // We might like to remove this FragmentSpread by returning null here if\n // the corresponding FragmentDefinition node is also going to be removed\n // by the logic below, but we can't control the relative order of those\n // events, so we have to postpone the removal of dangling FragmentSpread\n // nodes until after the current visit of the document has finished.\n },\n },\n FragmentDefinition: {\n enter: function (node, _key, _parent, path) {\n originalFragmentDefsByPath.set(JSON.stringify(path), node);\n },\n leave: function (node, _key, _parent, path) {\n var originalNode = originalFragmentDefsByPath.get(JSON.stringify(path));\n if (node === originalNode) {\n // If the FragmentNode received by this leave function is identical to\n // the one received by the corresponding enter function (above), then\n // the visitor must not have made any changes within this\n // FragmentDefinition node. This fragment definition may still be\n // removed if there are no ...spread references to it, but it won't be\n // removed just because it has only a __typename field.\n return node;\n }\n if (\n // This logic applies only if the document contains one or more\n // operations, since removing all fragments from a document containing\n // only fragments makes the document useless.\n operationCount > 0 &&\n node.selectionSet.selections.every(function (selection) {\n return selection.kind === Kind.FIELD &&\n selection.name.value === \"__typename\";\n })) {\n // This is a somewhat opinionated choice: if a FragmentDefinition ends\n // up having no fields other than __typename, we remove the whole\n // fragment definition, and later prune ...spread references to it.\n getInUseByFragmentName(node.name.value).removed = true;\n firstVisitMadeChanges = true;\n return null;\n }\n },\n },\n Directive: {\n leave: function (node) {\n // If a matching directive is found, remove the directive itself. Note\n // that this does not remove the target (field, argument, etc) of the\n // directive, but only the directive itself.\n if (directiveMatcher(node)) {\n firstVisitMadeChanges = true;\n return null;\n }\n },\n },\n });\n if (!firstVisitMadeChanges) {\n // If our first pass did not change anything about the document, then there\n // is no cleanup we need to do, and we can return the original doc.\n return doc;\n }\n // Utility for making sure inUse.transitiveVars is recursively populated.\n // Because this logic assumes inUse.fragmentSpreads has been completely\n // populated and inUse.removed has been set if appropriate,\n // populateTransitiveVars must be called after that information has been\n // collected by the first traversal of the document.\n var populateTransitiveVars = function (inUse) {\n if (!inUse.transitiveVars) {\n inUse.transitiveVars = new Set(inUse.variables);\n if (!inUse.removed) {\n inUse.fragmentSpreads.forEach(function (childFragmentName) {\n populateTransitiveVars(getInUseByFragmentName(childFragmentName)).transitiveVars.forEach(function (varName) {\n inUse.transitiveVars.add(varName);\n });\n });\n }\n }\n return inUse;\n };\n // Since we've been keeping track of fragment spreads used by particular\n // operations and fragment definitions, we now need to compute the set of all\n // spreads used (transitively) by any operations in the document.\n var allFragmentNamesUsed = new Set();\n docWithoutDirectiveSubtrees.definitions.forEach(function (def) {\n if (def.kind === Kind.OPERATION_DEFINITION) {\n populateTransitiveVars(getInUseByOperationName(def.name && def.name.value)).fragmentSpreads.forEach(function (childFragmentName) {\n allFragmentNamesUsed.add(childFragmentName);\n });\n }\n else if (def.kind === Kind.FRAGMENT_DEFINITION &&\n // If there are no operations in the document, then all fragment\n // definitions count as usages of their own fragment names. This heuristic\n // prevents accidentally removing all fragment definitions from the\n // document just because it contains no operations that use the fragments.\n operationCount === 0 &&\n !getInUseByFragmentName(def.name.value).removed) {\n allFragmentNamesUsed.add(def.name.value);\n }\n });\n // Now that we have added all fragment spreads used by operations to the\n // allFragmentNamesUsed set, we can complete the set by transitively adding\n // all fragment spreads used by those fragments, and so on.\n allFragmentNamesUsed.forEach(function (fragmentName) {\n // Once all the childFragmentName strings added here have been seen already,\n // the top-level allFragmentNamesUsed.forEach loop will terminate.\n populateTransitiveVars(getInUseByFragmentName(fragmentName)).fragmentSpreads.forEach(function (childFragmentName) {\n allFragmentNamesUsed.add(childFragmentName);\n });\n });\n var fragmentWillBeRemoved = function (fragmentName) {\n return !!(\n // A fragment definition will be removed if there are no spreads that refer\n // to it, or the fragment was explicitly removed because it had no fields\n // other than __typename.\n (!allFragmentNamesUsed.has(fragmentName) ||\n getInUseByFragmentName(fragmentName).removed));\n };\n var enterVisitor = {\n enter: function (node) {\n if (fragmentWillBeRemoved(node.name.value)) {\n return null;\n }\n },\n };\n return nullIfDocIsEmpty(visit(docWithoutDirectiveSubtrees, {\n // If the fragment is going to be removed, then leaving any dangling\n // FragmentSpread nodes with the same name would be a mistake.\n FragmentSpread: enterVisitor,\n // This is where the fragment definition is actually removed.\n FragmentDefinition: enterVisitor,\n OperationDefinition: {\n leave: function (node) {\n // Upon leaving each operation in the depth-first AST traversal, prune\n // any variables that are declared by the operation but unused within.\n if (node.variableDefinitions) {\n var usedVariableNames_1 = populateTransitiveVars(\n // If an operation is anonymous, we use the empty string as its key.\n getInUseByOperationName(node.name && node.name.value)).transitiveVars;\n // According to the GraphQL spec, all variables declared by an\n // operation must either be used by that operation or used by some\n // fragment included transitively into that operation:\n // https://spec.graphql.org/draft/#sec-All-Variables-Used\n //\n // To stay on the right side of this validation rule, if/when we\n // remove the last $var references from an operation or its fragments,\n // we must also remove the corresponding $var declaration from the\n // enclosing operation. This pruning applies only to operations and\n // not fragment definitions, at the moment. Fragments may be able to\n // declare variables eventually, but today they can only consume them.\n if (usedVariableNames_1.size < node.variableDefinitions.length) {\n return __assign(__assign({}, node), { variableDefinitions: node.variableDefinitions.filter(function (varDef) {\n return usedVariableNames_1.has(varDef.variable.name.value);\n }) });\n }\n }\n },\n },\n }));\n}\nexport var addTypenameToDocument = Object.assign(function (doc) {\n return visit(doc, {\n SelectionSet: {\n enter: function (node, _key, parent) {\n // Don't add __typename to OperationDefinitions.\n if (parent &&\n parent.kind ===\n Kind.OPERATION_DEFINITION) {\n return;\n }\n // No changes if no selections.\n var selections = node.selections;\n if (!selections) {\n return;\n }\n // If selections already have a __typename, or are part of an\n // introspection query, do nothing.\n var skip = selections.some(function (selection) {\n return (isField(selection) &&\n (selection.name.value === \"__typename\" ||\n selection.name.value.lastIndexOf(\"__\", 0) === 0));\n });\n if (skip) {\n return;\n }\n // If this SelectionSet is @export-ed as an input variable, it should\n // not have a __typename field (see issue #4691).\n var field = parent;\n if (isField(field) &&\n field.directives &&\n field.directives.some(function (d) { return d.name.value === \"export\"; })) {\n return;\n }\n // Create and return a new SelectionSet with a __typename Field.\n return __assign(__assign({}, node), { selections: __spreadArray(__spreadArray([], selections, true), [TYPENAME_FIELD], false) });\n },\n },\n });\n}, {\n added: function (field) {\n return field === TYPENAME_FIELD;\n },\n});\nvar connectionRemoveConfig = {\n test: function (directive) {\n var willRemove = directive.name.value === \"connection\";\n if (willRemove) {\n if (!directive.arguments ||\n !directive.arguments.some(function (arg) { return arg.name.value === \"key\"; })) {\n globalThis.__DEV__ !== false && invariant.warn(87);\n }\n }\n return willRemove;\n },\n};\nexport function removeConnectionDirectiveFromDocument(doc) {\n return removeDirectivesFromDocument([connectionRemoveConfig], checkDocument(doc));\n}\nfunction hasDirectivesInSelectionSet(directives, selectionSet, nestedCheck) {\n if (nestedCheck === void 0) { nestedCheck = true; }\n return (!!selectionSet &&\n selectionSet.selections &&\n selectionSet.selections.some(function (selection) {\n return hasDirectivesInSelection(directives, selection, nestedCheck);\n }));\n}\nfunction hasDirectivesInSelection(directives, selection, nestedCheck) {\n if (nestedCheck === void 0) { nestedCheck = true; }\n if (!isField(selection)) {\n return true;\n }\n if (!selection.directives) {\n return false;\n }\n return (selection.directives.some(getDirectiveMatcher(directives)) ||\n (nestedCheck &&\n hasDirectivesInSelectionSet(directives, selection.selectionSet, nestedCheck)));\n}\nfunction getArgumentMatcher(config) {\n return function argumentMatcher(argument) {\n return config.some(function (aConfig) {\n return argument.value &&\n argument.value.kind === Kind.VARIABLE &&\n argument.value.name &&\n (aConfig.name === argument.value.name.value ||\n (aConfig.test && aConfig.test(argument)));\n });\n };\n}\nexport function removeArgumentsFromDocument(config, doc) {\n var argMatcher = getArgumentMatcher(config);\n return nullIfDocIsEmpty(visit(doc, {\n OperationDefinition: {\n enter: function (node) {\n return __assign(__assign({}, node), { \n // Remove matching top level variables definitions.\n variableDefinitions: node.variableDefinitions ?\n node.variableDefinitions.filter(function (varDef) {\n return !config.some(function (arg) { return arg.name === varDef.variable.name.value; });\n })\n : [] });\n },\n },\n Field: {\n enter: function (node) {\n // If `remove` is set to true for an argument, and an argument match\n // is found for a field, remove the field as well.\n var shouldRemoveField = config.some(function (argConfig) { return argConfig.remove; });\n if (shouldRemoveField) {\n var argMatchCount_1 = 0;\n if (node.arguments) {\n node.arguments.forEach(function (arg) {\n if (argMatcher(arg)) {\n argMatchCount_1 += 1;\n }\n });\n }\n if (argMatchCount_1 === 1) {\n return null;\n }\n }\n },\n },\n Argument: {\n enter: function (node) {\n // Remove all matching arguments.\n if (argMatcher(node)) {\n return null;\n }\n },\n },\n }));\n}\nexport function removeFragmentSpreadFromDocument(config, doc) {\n function enter(node) {\n if (config.some(function (def) { return def.name === node.name.value; })) {\n return null;\n }\n }\n return nullIfDocIsEmpty(visit(doc, {\n FragmentSpread: { enter: enter },\n FragmentDefinition: { enter: enter },\n }));\n}\n// If the incoming document is a query, return it as is. Otherwise, build a\n// new document containing a query operation based on the selection set\n// of the previous main operation.\nexport function buildQueryFromSelectionSet(document) {\n var definition = getMainDefinition(document);\n var definitionOperation = definition.operation;\n if (definitionOperation === \"query\") {\n // Already a query, so return the existing document.\n return document;\n }\n // Build a new query using the selection set of the main operation.\n var modifiedDoc = visit(document, {\n OperationDefinition: {\n enter: function (node) {\n return __assign(__assign({}, node), { operation: \"query\" });\n },\n },\n });\n return modifiedDoc;\n}\n// Remove fields / selection sets that include an @client directive.\nexport function removeClientSetsFromDocument(document) {\n checkDocument(document);\n var modifiedDoc = removeDirectivesFromDocument([\n {\n test: function (directive) { return directive.name.value === \"client\"; },\n remove: true,\n },\n ], document);\n return modifiedDoc;\n}\n//# sourceMappingURL=transform.js.map","import { __assign, __rest } from \"tslib\";\nimport { invariant } from \"../../utilities/globals/index.js\";\nimport { ApolloLink } from \"../core/index.js\";\nimport { Observable, hasDirectives } from \"../../utilities/index.js\";\nimport { serializeFetchParameter } from \"./serializeFetchParameter.js\";\nimport { selectURI } from \"./selectURI.js\";\nimport { handleError, readMultipartBody, parseAndCheckHttpResponse, } from \"./parseAndCheckHttpResponse.js\";\nimport { checkFetcher } from \"./checkFetcher.js\";\nimport { selectHttpOptionsAndBodyInternal, defaultPrinter, fallbackHttpConfig, } from \"./selectHttpOptionsAndBody.js\";\nimport { rewriteURIForGET } from \"./rewriteURIForGET.js\";\nimport { fromError, filterOperationVariables } from \"../utils/index.js\";\nimport { maybe, getMainDefinition, removeClientSetsFromDocument, } from \"../../utilities/index.js\";\nvar backupFetch = maybe(function () { return fetch; });\nexport var createHttpLink = function (linkOptions) {\n if (linkOptions === void 0) { linkOptions = {}; }\n var _a = linkOptions.uri, uri = _a === void 0 ? \"/graphql\" : _a, \n // use default global fetch if nothing passed in\n preferredFetch = linkOptions.fetch, _b = linkOptions.print, print = _b === void 0 ? defaultPrinter : _b, includeExtensions = linkOptions.includeExtensions, preserveHeaderCase = linkOptions.preserveHeaderCase, useGETForQueries = linkOptions.useGETForQueries, _c = linkOptions.includeUnusedVariables, includeUnusedVariables = _c === void 0 ? false : _c, requestOptions = __rest(linkOptions, [\"uri\", \"fetch\", \"print\", \"includeExtensions\", \"preserveHeaderCase\", \"useGETForQueries\", \"includeUnusedVariables\"]);\n if (globalThis.__DEV__ !== false) {\n // Make sure at least one of preferredFetch, window.fetch, or backupFetch is\n // defined, so requests won't fail at runtime.\n checkFetcher(preferredFetch || backupFetch);\n }\n var linkConfig = {\n http: { includeExtensions: includeExtensions, preserveHeaderCase: preserveHeaderCase },\n options: requestOptions.fetchOptions,\n credentials: requestOptions.credentials,\n headers: requestOptions.headers,\n };\n return new ApolloLink(function (operation) {\n var chosenURI = selectURI(operation, uri);\n var context = operation.getContext();\n // `apollographql-client-*` headers are automatically set if a\n // `clientAwareness` object is found in the context. These headers are\n // set first, followed by the rest of the headers pulled from\n // `context.headers`. If desired, `apollographql-client-*` headers set by\n // the `clientAwareness` object can be overridden by\n // `apollographql-client-*` headers set in `context.headers`.\n var clientAwarenessHeaders = {};\n if (context.clientAwareness) {\n var _a = context.clientAwareness, name_1 = _a.name, version = _a.version;\n if (name_1) {\n clientAwarenessHeaders[\"apollographql-client-name\"] = name_1;\n }\n if (version) {\n clientAwarenessHeaders[\"apollographql-client-version\"] = version;\n }\n }\n var contextHeaders = __assign(__assign({}, clientAwarenessHeaders), context.headers);\n var contextConfig = {\n http: context.http,\n options: context.fetchOptions,\n credentials: context.credentials,\n headers: contextHeaders,\n };\n if (hasDirectives([\"client\"], operation.query)) {\n var transformedQuery = removeClientSetsFromDocument(operation.query);\n if (!transformedQuery) {\n return fromError(new Error(\"HttpLink: Trying to send a client-only query to the server. To send to the server, ensure a non-client field is added to the query or set the `transformOptions.removeClientFields` option to `true`.\"));\n }\n operation.query = transformedQuery;\n }\n //uses fallback, link, and then context to build options\n var _b = selectHttpOptionsAndBodyInternal(operation, print, fallbackHttpConfig, linkConfig, contextConfig), options = _b.options, body = _b.body;\n if (body.variables && !includeUnusedVariables) {\n body.variables = filterOperationVariables(body.variables, operation.query);\n }\n var controller;\n if (!options.signal && typeof AbortController !== \"undefined\") {\n controller = new AbortController();\n options.signal = controller.signal;\n }\n // If requested, set method to GET if there are no mutations.\n var definitionIsMutation = function (d) {\n return d.kind === \"OperationDefinition\" && d.operation === \"mutation\";\n };\n var definitionIsSubscription = function (d) {\n return d.kind === \"OperationDefinition\" && d.operation === \"subscription\";\n };\n var isSubscription = definitionIsSubscription(getMainDefinition(operation.query));\n // does not match custom directives beginning with @defer\n var hasDefer = hasDirectives([\"defer\"], operation.query);\n if (useGETForQueries &&\n !operation.query.definitions.some(definitionIsMutation)) {\n options.method = \"GET\";\n }\n if (hasDefer || isSubscription) {\n options.headers = options.headers || {};\n var acceptHeader = \"multipart/mixed;\";\n // Omit defer-specific headers if the user attempts to defer a selection\n // set on a subscription and log a warning.\n if (isSubscription && hasDefer) {\n globalThis.__DEV__ !== false && invariant.warn(39);\n }\n if (isSubscription) {\n acceptHeader +=\n \"boundary=graphql;subscriptionSpec=1.0,application/json\";\n }\n else if (hasDefer) {\n acceptHeader += \"deferSpec=20220824,application/json\";\n }\n options.headers.accept = acceptHeader;\n }\n if (options.method === \"GET\") {\n var _c = rewriteURIForGET(chosenURI, body), newURI = _c.newURI, parseError = _c.parseError;\n if (parseError) {\n return fromError(parseError);\n }\n chosenURI = newURI;\n }\n else {\n try {\n options.body = serializeFetchParameter(body, \"Payload\");\n }\n catch (parseError) {\n return fromError(parseError);\n }\n }\n return new Observable(function (observer) {\n // Prefer linkOptions.fetch (preferredFetch) if provided, and otherwise\n // fall back to the *current* global window.fetch function (see issue\n // #7832), or (if all else fails) the backupFetch function we saved when\n // this module was first evaluated. This last option protects against the\n // removal of window.fetch, which is unlikely but not impossible.\n var currentFetch = preferredFetch || maybe(function () { return fetch; }) || backupFetch;\n var observerNext = observer.next.bind(observer);\n currentFetch(chosenURI, options)\n .then(function (response) {\n var _a;\n operation.setContext({ response: response });\n var ctype = (_a = response.headers) === null || _a === void 0 ? void 0 : _a.get(\"content-type\");\n if (ctype !== null && /^multipart\\/mixed/i.test(ctype)) {\n return readMultipartBody(response, observerNext);\n }\n else {\n return parseAndCheckHttpResponse(operation)(response).then(observerNext);\n }\n })\n .then(function () {\n controller = undefined;\n observer.complete();\n })\n .catch(function (err) {\n controller = undefined;\n handleError(err, observer);\n });\n return function () {\n // XXX support canceling this request\n // https://developers.google.com/web/updates/2017/09/abortable-fetch\n if (controller)\n controller.abort();\n };\n });\n });\n};\n//# sourceMappingURL=createHttpLink.js.map","export var selectURI = function (operation, fallbackURI) {\n var context = operation.getContext();\n var contextURI = context.uri;\n if (contextURI) {\n return contextURI;\n }\n else if (typeof fallbackURI === \"function\") {\n return fallbackURI(operation);\n }\n else {\n return fallbackURI || \"/graphql\";\n }\n};\n//# sourceMappingURL=selectURI.js.map","import { __assign } from \"tslib\";\nimport { visit } from \"graphql\";\nexport function filterOperationVariables(variables, query) {\n var result = __assign({}, variables);\n var unusedNames = new Set(Object.keys(variables));\n visit(query, {\n Variable: function (node, _key, parent) {\n // A variable type definition at the top level of a query is not\n // enough to silence server-side errors about the variable being\n // unused, so variable definitions do not count as usage.\n // https://spec.graphql.org/draft/#sec-All-Variables-Used\n if (parent &&\n parent.kind !== \"VariableDefinition\") {\n unusedNames.delete(node.name.value);\n }\n },\n });\n unusedNames.forEach(function (name) {\n delete result[name];\n });\n return result;\n}\n//# sourceMappingURL=filterOperationVariables.js.map","import { serializeFetchParameter } from \"./serializeFetchParameter.js\";\n// For GET operations, returns the given URI rewritten with parameters, or a\n// parse error.\nexport function rewriteURIForGET(chosenURI, body) {\n // Implement the standard HTTP GET serialization, plus 'extensions'. Note\n // the extra level of JSON serialization!\n var queryParams = [];\n var addQueryParam = function (key, value) {\n queryParams.push(\"\".concat(key, \"=\").concat(encodeURIComponent(value)));\n };\n if (\"query\" in body) {\n addQueryParam(\"query\", body.query);\n }\n if (body.operationName) {\n addQueryParam(\"operationName\", body.operationName);\n }\n if (body.variables) {\n var serializedVariables = void 0;\n try {\n serializedVariables = serializeFetchParameter(body.variables, \"Variables map\");\n }\n catch (parseError) {\n return { parseError: parseError };\n }\n addQueryParam(\"variables\", serializedVariables);\n }\n if (body.extensions) {\n var serializedExtensions = void 0;\n try {\n serializedExtensions = serializeFetchParameter(body.extensions, \"Extensions map\");\n }\n catch (parseError) {\n return { parseError: parseError };\n }\n addQueryParam(\"extensions\", serializedExtensions);\n }\n // Reconstruct the URI with added query params.\n // XXX This assumes that the URI is well-formed and that it doesn't\n // already contain any of these query params. We could instead use the\n // URL API and take a polyfill (whatwg-url@6) for older browsers that\n // don't support URLSearchParams. Note that some browsers (and\n // versions of whatwg-url) support URL but not URLSearchParams!\n var fragment = \"\", preFragment = chosenURI;\n var fragmentStart = chosenURI.indexOf(\"#\");\n if (fragmentStart !== -1) {\n fragment = chosenURI.substr(fragmentStart);\n preFragment = chosenURI.substr(0, fragmentStart);\n }\n var queryParamsPrefix = preFragment.indexOf(\"?\") === -1 ? \"?\" : \"&\";\n var newURI = preFragment + queryParamsPrefix + queryParams.join(\"&\") + fragment;\n return { newURI: newURI };\n}\n//# sourceMappingURL=rewriteURIForGET.js.map","import { __extends } from \"tslib\";\nimport { ApolloLink } from \"../core/index.js\";\nimport { createHttpLink } from \"./createHttpLink.js\";\nvar HttpLink = /** @class */ (function (_super) {\n __extends(HttpLink, _super);\n function HttpLink(options) {\n if (options === void 0) { options = {}; }\n var _this = _super.call(this, createHttpLink(options).request) || this;\n _this.options = options;\n return _this;\n }\n return HttpLink;\n}(ApolloLink));\nexport { HttpLink };\n//# sourceMappingURL=HttpLink.js.map","// A [trie](https://en.wikipedia.org/wiki/Trie) data structure that holds\n// object keys weakly, yet can also hold non-object keys, unlike the\n// native `WeakMap`.\n// If no makeData function is supplied, the looked-up data will be an empty,\n// null-prototype Object.\nconst defaultMakeData = () => Object.create(null);\n// Useful for processing arguments objects as well as arrays.\nconst { forEach, slice } = Array.prototype;\nconst { hasOwnProperty } = Object.prototype;\nexport class Trie {\n constructor(weakness = true, makeData = defaultMakeData) {\n this.weakness = weakness;\n this.makeData = makeData;\n }\n lookup() {\n return this.lookupArray(arguments);\n }\n lookupArray(array) {\n let node = this;\n forEach.call(array, key => node = node.getChildTrie(key));\n return hasOwnProperty.call(node, \"data\")\n ? node.data\n : node.data = this.makeData(slice.call(array));\n }\n peek() {\n return this.peekArray(arguments);\n }\n peekArray(array) {\n let node = this;\n for (let i = 0, len = array.length; node && i < len; ++i) {\n const map = node.mapFor(array[i], false);\n node = map && map.get(array[i]);\n }\n return node && node.data;\n }\n remove() {\n return this.removeArray(arguments);\n }\n removeArray(array) {\n let data;\n if (array.length) {\n const head = array[0];\n const map = this.mapFor(head, false);\n const child = map && map.get(head);\n if (child) {\n data = child.removeArray(slice.call(array, 1));\n if (!child.data && !child.weak && !(child.strong && child.strong.size)) {\n map.delete(head);\n }\n }\n }\n else {\n data = this.data;\n delete this.data;\n }\n return data;\n }\n getChildTrie(key) {\n const map = this.mapFor(key, true);\n let child = map.get(key);\n if (!child)\n map.set(key, child = new Trie(this.weakness, this.makeData));\n return child;\n }\n mapFor(key, create) {\n return this.weakness && isObjRef(key)\n ? this.weak || (create ? this.weak = new WeakMap : void 0)\n : this.strong || (create ? this.strong = new Map : void 0);\n }\n}\nfunction isObjRef(value) {\n switch (typeof value) {\n case \"object\":\n if (value === null)\n break;\n // Fall through to return true...\n case \"function\":\n return true;\n }\n return false;\n}\n//# sourceMappingURL=index.js.map","// A [trie](https://en.wikipedia.org/wiki/Trie) data structure that holds\n// object keys weakly, yet can also hold non-object keys, unlike the\n// native `WeakMap`.\n// If no makeData function is supplied, the looked-up data will be an empty,\n// null-prototype Object.\nconst defaultMakeData = () => Object.create(null);\n// Useful for processing arguments objects as well as arrays.\nconst { forEach, slice } = Array.prototype;\nconst { hasOwnProperty } = Object.prototype;\nexport class Trie {\n constructor(weakness = true, makeData = defaultMakeData) {\n this.weakness = weakness;\n this.makeData = makeData;\n }\n lookup(...array) {\n return this.lookupArray(array);\n }\n lookupArray(array) {\n let node = this;\n forEach.call(array, key => node = node.getChildTrie(key));\n return hasOwnProperty.call(node, \"data\")\n ? node.data\n : node.data = this.makeData(slice.call(array));\n }\n peek(...array) {\n return this.peekArray(array);\n }\n peekArray(array) {\n let node = this;\n for (let i = 0, len = array.length; node && i < len; ++i) {\n const map = this.weakness && isObjRef(array[i]) ? node.weak : node.strong;\n node = map && map.get(array[i]);\n }\n return node && node.data;\n }\n getChildTrie(key) {\n const map = this.weakness && isObjRef(key)\n ? this.weak || (this.weak = new WeakMap())\n : this.strong || (this.strong = new Map());\n let child = map.get(key);\n if (!child)\n map.set(key, child = new Trie(this.weakness, this.makeData));\n return child;\n }\n}\nfunction isObjRef(value) {\n switch (typeof value) {\n case \"object\":\n if (value === null)\n break;\n // Fall through to return true...\n case \"function\":\n return true;\n }\n return false;\n}\n//# sourceMappingURL=index.js.map","// This currentContext variable will only be used if the makeSlotClass\n// function is called, which happens only if this is the first copy of the\n// @wry/context package to be imported.\nlet currentContext = null;\n// This unique internal object is used to denote the absence of a value\n// for a given Slot, and is never exposed to outside code.\nconst MISSING_VALUE = {};\nlet idCounter = 1;\n// Although we can't do anything about the cost of duplicated code from\n// accidentally bundling multiple copies of the @wry/context package, we can\n// avoid creating the Slot class more than once using makeSlotClass.\nconst makeSlotClass = () => class Slot {\n constructor() {\n // If you have a Slot object, you can find out its slot.id, but you cannot\n // guess the slot.id of a Slot you don't have access to, thanks to the\n // randomized suffix.\n this.id = [\n \"slot\",\n idCounter++,\n Date.now(),\n Math.random().toString(36).slice(2),\n ].join(\":\");\n }\n hasValue() {\n for (let context = currentContext; context; context = context.parent) {\n // We use the Slot object iself as a key to its value, which means the\n // value cannot be obtained without a reference to the Slot object.\n if (this.id in context.slots) {\n const value = context.slots[this.id];\n if (value === MISSING_VALUE)\n break;\n if (context !== currentContext) {\n // Cache the value in currentContext.slots so the next lookup will\n // be faster. This caching is safe because the tree of contexts and\n // the values of the slots are logically immutable.\n currentContext.slots[this.id] = value;\n }\n return true;\n }\n }\n if (currentContext) {\n // If a value was not found for this Slot, it's never going to be found\n // no matter how many times we look it up, so we might as well cache\n // the absence of the value, too.\n currentContext.slots[this.id] = MISSING_VALUE;\n }\n return false;\n }\n getValue() {\n if (this.hasValue()) {\n return currentContext.slots[this.id];\n }\n }\n withValue(value, callback, \n // Given the prevalence of arrow functions, specifying arguments is likely\n // to be much more common than specifying `this`, hence this ordering:\n args, thisArg) {\n const slots = {\n __proto__: null,\n [this.id]: value,\n };\n const parent = currentContext;\n currentContext = { parent, slots };\n try {\n // Function.prototype.apply allows the arguments array argument to be\n // omitted or undefined, so args! is fine here.\n return callback.apply(thisArg, args);\n }\n finally {\n currentContext = parent;\n }\n }\n // Capture the current context and wrap a callback function so that it\n // reestablishes the captured context when called.\n static bind(callback) {\n const context = currentContext;\n return function () {\n const saved = currentContext;\n try {\n currentContext = context;\n return callback.apply(this, arguments);\n }\n finally {\n currentContext = saved;\n }\n };\n }\n // Immediately run a callback function without any captured context.\n static noContext(callback, \n // Given the prevalence of arrow functions, specifying arguments is likely\n // to be much more common than specifying `this`, hence this ordering:\n args, thisArg) {\n if (currentContext) {\n const saved = currentContext;\n try {\n currentContext = null;\n // Function.prototype.apply allows the arguments array argument to be\n // omitted or undefined, so args! is fine here.\n return callback.apply(thisArg, args);\n }\n finally {\n currentContext = saved;\n }\n }\n else {\n return callback.apply(thisArg, args);\n }\n }\n};\nfunction maybe(fn) {\n try {\n return fn();\n }\n catch (ignored) { }\n}\n// We store a single global implementation of the Slot class as a permanent\n// non-enumerable property of the globalThis object. This obfuscation does\n// nothing to prevent access to the Slot class, but at least it ensures the\n// implementation (i.e. currentContext) cannot be tampered with, and all copies\n// of the @wry/context package (hopefully just one) will share the same Slot\n// implementation. Since the first copy of the @wry/context package to be\n// imported wins, this technique imposes a steep cost for any future breaking\n// changes to the Slot class.\nconst globalKey = \"@wry/context:Slot\";\nconst host = \n// Prefer globalThis when available.\n// https://github.com/benjamn/wryware/issues/347\nmaybe(() => globalThis) ||\n // Fall back to global, which works in Node.js and may be converted by some\n // bundlers to the appropriate identifier (window, self, ...) depending on the\n // bundling target. https://github.com/endojs/endo/issues/576#issuecomment-1178515224\n maybe(() => global) ||\n // Otherwise, use a dummy host that's local to this module. We used to fall\n // back to using the Array constructor as a namespace, but that was flagged in\n // https://github.com/benjamn/wryware/issues/347, and can be avoided.\n Object.create(null);\n// Whichever globalHost we're using, make TypeScript happy about the additional\n// globalKey property.\nconst globalHost = host;\nexport const Slot = globalHost[globalKey] ||\n // Earlier versions of this package stored the globalKey property on the Array\n // constructor, so we check there as well, to prevent Slot class duplication.\n Array[globalKey] ||\n (function (Slot) {\n try {\n Object.defineProperty(globalHost, globalKey, {\n value: Slot,\n enumerable: false,\n writable: false,\n // When it was possible for globalHost to be the Array constructor (a\n // legacy Slot dedup strategy), it was important for the property to be\n // configurable:true so it could be deleted. That does not seem to be as\n // important when globalHost is the global object, but I don't want to\n // cause similar problems again, and configurable:true seems safest.\n // https://github.com/endojs/endo/issues/576#issuecomment-1178274008\n configurable: true\n });\n }\n finally {\n return Slot;\n }\n })(makeSlotClass());\n//# sourceMappingURL=slot.js.map","import { Slot } from \"./slot.js\";\nexport { Slot };\nexport const { bind, noContext } = Slot;\n// Like global.setTimeout, except the callback runs with captured context.\nexport { setTimeoutWithContext as setTimeout };\nfunction setTimeoutWithContext(callback, delay) {\n return setTimeout(bind(callback), delay);\n}\n// Turn any generator function into an async function (using yield instead\n// of await), with context automatically preserved across yields.\nexport function asyncFromGen(genFn) {\n return function () {\n const gen = genFn.apply(this, arguments);\n const boundNext = bind(gen.next);\n const boundThrow = bind(gen.throw);\n return new Promise((resolve, reject) => {\n function invoke(method, argument) {\n try {\n var result = method.call(gen, argument);\n }\n catch (error) {\n return reject(error);\n }\n const next = result.done ? resolve : invokeNext;\n if (isPromiseLike(result.value)) {\n result.value.then(next, result.done ? reject : invokeThrow);\n }\n else {\n next(result.value);\n }\n }\n const invokeNext = (value) => invoke(boundNext, value);\n const invokeThrow = (error) => invoke(boundThrow, error);\n invokeNext();\n });\n };\n}\nfunction isPromiseLike(value) {\n return value && typeof value.then === \"function\";\n}\n// If you use the fibers npm package to implement coroutines in Node.js,\n// you should call this function at least once to ensure context management\n// remains coherent across any yields.\nconst wrappedFibers = [];\nexport function wrapYieldingFiberMethods(Fiber) {\n // There can be only one implementation of Fiber per process, so this array\n // should never grow longer than one element.\n if (wrappedFibers.indexOf(Fiber) < 0) {\n const wrap = (obj, method) => {\n const fn = obj[method];\n obj[method] = function () {\n return noContext(fn, arguments, this);\n };\n };\n // These methods can yield, according to\n // https://github.com/laverdet/node-fibers/blob/ddebed9b8ae3883e57f822e2108e6943e5c8d2a8/fibers.js#L97-L100\n wrap(Fiber, \"yield\");\n wrap(Fiber.prototype, \"run\");\n wrap(Fiber.prototype, \"throwInto\");\n wrappedFibers.push(Fiber);\n }\n return Fiber;\n}\n//# sourceMappingURL=index.js.map","import { Slot } from \"@wry/context\";\nexport const parentEntrySlot = new Slot();\nexport function nonReactive(fn) {\n return parentEntrySlot.withValue(void 0, fn);\n}\nexport { Slot };\nexport { bind as bindContext, noContext, setTimeout, asyncFromGen, } from \"@wry/context\";\n//# sourceMappingURL=context.js.map","export const { hasOwnProperty, } = Object.prototype;\nexport const arrayFromSet = Array.from ||\n function (set) {\n const array = [];\n set.forEach(item => array.push(item));\n return array;\n };\nexport function maybeUnsubscribe(entryOrDep) {\n const { unsubscribe } = entryOrDep;\n if (typeof unsubscribe === \"function\") {\n entryOrDep.unsubscribe = void 0;\n unsubscribe();\n }\n}\n//# sourceMappingURL=helpers.js.map","import { parentEntrySlot } from \"./context.js\";\nimport { maybeUnsubscribe, arrayFromSet } from \"./helpers.js\";\nconst emptySetPool = [];\nconst POOL_TARGET_SIZE = 100;\n// Since this package might be used browsers, we should avoid using the\n// Node built-in assert module.\nfunction assert(condition, optionalMessage) {\n if (!condition) {\n throw new Error(optionalMessage || \"assertion failure\");\n }\n}\nfunction valueIs(a, b) {\n const len = a.length;\n return (\n // Unknown values are not equal to each other.\n len > 0 &&\n // Both values must be ordinary (or both exceptional) to be equal.\n len === b.length &&\n // The underlying value or exception must be the same.\n a[len - 1] === b[len - 1]);\n}\nfunction valueGet(value) {\n switch (value.length) {\n case 0: throw new Error(\"unknown value\");\n case 1: return value[0];\n case 2: throw value[1];\n }\n}\nfunction valueCopy(value) {\n return value.slice(0);\n}\nexport class Entry {\n constructor(fn) {\n this.fn = fn;\n this.parents = new Set();\n this.childValues = new Map();\n // When this Entry has children that are dirty, this property becomes\n // a Set containing other Entry objects, borrowed from emptySetPool.\n // When the set becomes empty, it gets recycled back to emptySetPool.\n this.dirtyChildren = null;\n this.dirty = true;\n this.recomputing = false;\n this.value = [];\n this.deps = null;\n ++Entry.count;\n }\n peek() {\n if (this.value.length === 1 && !mightBeDirty(this)) {\n rememberParent(this);\n return this.value[0];\n }\n }\n // This is the most important method of the Entry API, because it\n // determines whether the cached this.value can be returned immediately,\n // or must be recomputed. The overall performance of the caching system\n // depends on the truth of the following observations: (1) this.dirty is\n // usually false, (2) this.dirtyChildren is usually null/empty, and thus\n // (3) valueGet(this.value) is usually returned without recomputation.\n recompute(args) {\n assert(!this.recomputing, \"already recomputing\");\n rememberParent(this);\n return mightBeDirty(this)\n ? reallyRecompute(this, args)\n : valueGet(this.value);\n }\n setDirty() {\n if (this.dirty)\n return;\n this.dirty = true;\n reportDirty(this);\n // We can go ahead and unsubscribe here, since any further dirty\n // notifications we receive will be redundant, and unsubscribing may\n // free up some resources, e.g. file watchers.\n maybeUnsubscribe(this);\n }\n dispose() {\n this.setDirty();\n // Sever any dependency relationships with our own children, so those\n // children don't retain this parent Entry in their child.parents sets,\n // thereby preventing it from being fully garbage collected.\n forgetChildren(this);\n // Because this entry has been kicked out of the cache (in index.js),\n // we've lost the ability to find out if/when this entry becomes dirty,\n // whether that happens through a subscription, because of a direct call\n // to entry.setDirty(), or because one of its children becomes dirty.\n // Because of this loss of future information, we have to assume the\n // worst (that this entry might have become dirty very soon), so we must\n // immediately mark this entry's parents as dirty. Normally we could\n // just call entry.setDirty() rather than calling parent.setDirty() for\n // each parent, but that would leave this entry in parent.childValues\n // and parent.dirtyChildren, which would prevent the child from being\n // truly forgotten.\n eachParent(this, (parent, child) => {\n parent.setDirty();\n forgetChild(parent, this);\n });\n }\n forget() {\n // The code that creates Entry objects in index.ts will replace this method\n // with one that actually removes the Entry from the cache, which will also\n // trigger the entry.dispose method.\n this.dispose();\n }\n dependOn(dep) {\n dep.add(this);\n if (!this.deps) {\n this.deps = emptySetPool.pop() || new Set();\n }\n this.deps.add(dep);\n }\n forgetDeps() {\n if (this.deps) {\n arrayFromSet(this.deps).forEach(dep => dep.delete(this));\n this.deps.clear();\n emptySetPool.push(this.deps);\n this.deps = null;\n }\n }\n}\nEntry.count = 0;\nfunction rememberParent(child) {\n const parent = parentEntrySlot.getValue();\n if (parent) {\n child.parents.add(parent);\n if (!parent.childValues.has(child)) {\n parent.childValues.set(child, []);\n }\n if (mightBeDirty(child)) {\n reportDirtyChild(parent, child);\n }\n else {\n reportCleanChild(parent, child);\n }\n return parent;\n }\n}\nfunction reallyRecompute(entry, args) {\n forgetChildren(entry);\n // Set entry as the parent entry while calling recomputeNewValue(entry).\n parentEntrySlot.withValue(entry, recomputeNewValue, [entry, args]);\n if (maybeSubscribe(entry, args)) {\n // If we successfully recomputed entry.value and did not fail to\n // (re)subscribe, then this Entry is no longer explicitly dirty.\n setClean(entry);\n }\n return valueGet(entry.value);\n}\nfunction recomputeNewValue(entry, args) {\n entry.recomputing = true;\n const { normalizeResult } = entry;\n let oldValueCopy;\n if (normalizeResult && entry.value.length === 1) {\n oldValueCopy = valueCopy(entry.value);\n }\n // Make entry.value an empty array, representing an unknown value.\n entry.value.length = 0;\n try {\n // If entry.fn succeeds, entry.value will become a normal Value.\n entry.value[0] = entry.fn.apply(null, args);\n // If we have a viable oldValueCopy to compare with the (successfully\n // recomputed) new entry.value, and they are not already === identical, give\n // normalizeResult a chance to pick/choose/reuse parts of oldValueCopy[0]\n // and/or entry.value[0] to determine the final cached entry.value.\n if (normalizeResult && oldValueCopy && !valueIs(oldValueCopy, entry.value)) {\n try {\n entry.value[0] = normalizeResult(entry.value[0], oldValueCopy[0]);\n }\n catch (_a) {\n // If normalizeResult throws, just use the newer value, rather than\n // saving the exception as entry.value[1].\n }\n }\n }\n catch (e) {\n // If entry.fn throws, entry.value will hold that exception.\n entry.value[1] = e;\n }\n // Either way, this line is always reached.\n entry.recomputing = false;\n}\nfunction mightBeDirty(entry) {\n return entry.dirty || !!(entry.dirtyChildren && entry.dirtyChildren.size);\n}\nfunction setClean(entry) {\n entry.dirty = false;\n if (mightBeDirty(entry)) {\n // This Entry may still have dirty children, in which case we can't\n // let our parents know we're clean just yet.\n return;\n }\n reportClean(entry);\n}\nfunction reportDirty(child) {\n eachParent(child, reportDirtyChild);\n}\nfunction reportClean(child) {\n eachParent(child, reportCleanChild);\n}\nfunction eachParent(child, callback) {\n const parentCount = child.parents.size;\n if (parentCount) {\n const parents = arrayFromSet(child.parents);\n for (let i = 0; i < parentCount; ++i) {\n callback(parents[i], child);\n }\n }\n}\n// Let a parent Entry know that one of its children may be dirty.\nfunction reportDirtyChild(parent, child) {\n // Must have called rememberParent(child) before calling\n // reportDirtyChild(parent, child).\n assert(parent.childValues.has(child));\n assert(mightBeDirty(child));\n const parentWasClean = !mightBeDirty(parent);\n if (!parent.dirtyChildren) {\n parent.dirtyChildren = emptySetPool.pop() || new Set;\n }\n else if (parent.dirtyChildren.has(child)) {\n // If we already know this child is dirty, then we must have already\n // informed our own parents that we are dirty, so we can terminate\n // the recursion early.\n return;\n }\n parent.dirtyChildren.add(child);\n // If parent was clean before, it just became (possibly) dirty (according to\n // mightBeDirty), since we just added child to parent.dirtyChildren.\n if (parentWasClean) {\n reportDirty(parent);\n }\n}\n// Let a parent Entry know that one of its children is no longer dirty.\nfunction reportCleanChild(parent, child) {\n // Must have called rememberChild(child) before calling\n // reportCleanChild(parent, child).\n assert(parent.childValues.has(child));\n assert(!mightBeDirty(child));\n const childValue = parent.childValues.get(child);\n if (childValue.length === 0) {\n parent.childValues.set(child, valueCopy(child.value));\n }\n else if (!valueIs(childValue, child.value)) {\n parent.setDirty();\n }\n removeDirtyChild(parent, child);\n if (mightBeDirty(parent)) {\n return;\n }\n reportClean(parent);\n}\nfunction removeDirtyChild(parent, child) {\n const dc = parent.dirtyChildren;\n if (dc) {\n dc.delete(child);\n if (dc.size === 0) {\n if (emptySetPool.length < POOL_TARGET_SIZE) {\n emptySetPool.push(dc);\n }\n parent.dirtyChildren = null;\n }\n }\n}\n// Removes all children from this entry and returns an array of the\n// removed children.\nfunction forgetChildren(parent) {\n if (parent.childValues.size > 0) {\n parent.childValues.forEach((_value, child) => {\n forgetChild(parent, child);\n });\n }\n // Remove this parent Entry from any sets to which it was added by the\n // addToSet method.\n parent.forgetDeps();\n // After we forget all our children, this.dirtyChildren must be empty\n // and therefore must have been reset to null.\n assert(parent.dirtyChildren === null);\n}\nfunction forgetChild(parent, child) {\n child.parents.delete(parent);\n parent.childValues.delete(child);\n removeDirtyChild(parent, child);\n}\nfunction maybeSubscribe(entry, args) {\n if (typeof entry.subscribe === \"function\") {\n try {\n maybeUnsubscribe(entry); // Prevent double subscriptions.\n entry.unsubscribe = entry.subscribe.apply(null, args);\n }\n catch (e) {\n // If this Entry has a subscribe function and it threw an exception\n // (or an unsubscribe function it previously returned now throws),\n // return false to indicate that we were not able to subscribe (or\n // unsubscribe), and this Entry should remain dirty.\n entry.setDirty();\n return false;\n }\n }\n // Returning true indicates either that there was no entry.subscribe\n // function or that it succeeded.\n return true;\n}\n//# sourceMappingURL=entry.js.map","import { parentEntrySlot } from \"./context.js\";\nimport { hasOwnProperty, maybeUnsubscribe, arrayFromSet, } from \"./helpers.js\";\nconst EntryMethods = {\n setDirty: true,\n dispose: true,\n forget: true, // Fully remove parent Entry from LRU cache and computation graph\n};\nexport function dep(options) {\n const depsByKey = new Map();\n const subscribe = options && options.subscribe;\n function depend(key) {\n const parent = parentEntrySlot.getValue();\n if (parent) {\n let dep = depsByKey.get(key);\n if (!dep) {\n depsByKey.set(key, dep = new Set);\n }\n parent.dependOn(dep);\n if (typeof subscribe === \"function\") {\n maybeUnsubscribe(dep);\n dep.unsubscribe = subscribe(key);\n }\n }\n }\n depend.dirty = function dirty(key, entryMethodName) {\n const dep = depsByKey.get(key);\n if (dep) {\n const m = (entryMethodName &&\n hasOwnProperty.call(EntryMethods, entryMethodName)) ? entryMethodName : \"setDirty\";\n // We have to use arrayFromSet(dep).forEach instead of dep.forEach,\n // because modifying a Set while iterating over it can cause elements in\n // the Set to be removed from the Set before they've been iterated over.\n arrayFromSet(dep).forEach(entry => entry[m]());\n depsByKey.delete(key);\n maybeUnsubscribe(dep);\n }\n };\n return depend;\n}\n//# sourceMappingURL=dep.js.map","import { Trie } from \"@wry/trie\";\nimport { StrongCache } from \"@wry/caches\";\nimport { Entry } from \"./entry.js\";\nimport { parentEntrySlot } from \"./context.js\";\n// These helper functions are important for making optimism work with\n// asynchronous code. In order to register parent-child dependencies,\n// optimism needs to know about any currently active parent computations.\n// In ordinary synchronous code, the parent context is implicit in the\n// execution stack, but asynchronous code requires some extra guidance in\n// order to propagate context from one async task segment to the next.\nexport { bindContext, noContext, nonReactive, setTimeout, asyncFromGen, Slot, } from \"./context.js\";\n// A lighter-weight dependency, similar to OptimisticWrapperFunction, except\n// with only one argument, no makeCacheKey, no wrapped function to recompute,\n// and no result value. Useful for representing dependency leaves in the graph\n// of computation. Subscriptions are supported.\nexport { dep } from \"./dep.js\";\n// The defaultMakeCacheKey function is remarkably powerful, because it gives\n// a unique object for any shallow-identical list of arguments. If you need\n// to implement a custom makeCacheKey function, you may find it helpful to\n// delegate the final work to defaultMakeCacheKey, which is why we export it\n// here. However, you may want to avoid defaultMakeCacheKey if your runtime\n// does not support WeakMap, or you have the ability to return a string key.\n// In those cases, just write your own custom makeCacheKey functions.\nlet defaultKeyTrie;\nexport function defaultMakeCacheKey(...args) {\n const trie = defaultKeyTrie || (defaultKeyTrie = new Trie(typeof WeakMap === \"function\"));\n return trie.lookupArray(args);\n}\n// If you're paranoid about memory leaks, or you want to avoid using WeakMap\n// under the hood, but you still need the behavior of defaultMakeCacheKey,\n// import this constructor to create your own tries.\nexport { Trie as KeyTrie };\n;\nconst caches = new Set();\nexport function wrap(originalFunction, { max = Math.pow(2, 16), keyArgs, makeCacheKey = defaultMakeCacheKey, normalizeResult, subscribe, cache: cacheOption = StrongCache, } = Object.create(null)) {\n const cache = typeof cacheOption === \"function\"\n ? new cacheOption(max, entry => entry.dispose())\n : cacheOption;\n const optimistic = function () {\n const key = makeCacheKey.apply(null, keyArgs ? keyArgs.apply(null, arguments) : arguments);\n if (key === void 0) {\n return originalFunction.apply(null, arguments);\n }\n let entry = cache.get(key);\n if (!entry) {\n cache.set(key, entry = new Entry(originalFunction));\n entry.normalizeResult = normalizeResult;\n entry.subscribe = subscribe;\n // Give the Entry the ability to trigger cache.delete(key), even though\n // the Entry itself does not know about key or cache.\n entry.forget = () => cache.delete(key);\n }\n const value = entry.recompute(Array.prototype.slice.call(arguments));\n // Move this entry to the front of the least-recently used queue,\n // since we just finished computing its value.\n cache.set(key, entry);\n caches.add(cache);\n // Clean up any excess entries in the cache, but only if there is no\n // active parent entry, meaning we're not in the middle of a larger\n // computation that might be flummoxed by the cleaning.\n if (!parentEntrySlot.hasValue()) {\n caches.forEach(cache => cache.clean());\n caches.clear();\n }\n return value;\n };\n Object.defineProperty(optimistic, \"size\", {\n get: () => cache.size,\n configurable: false,\n enumerable: false,\n });\n Object.freeze(optimistic.options = {\n max,\n keyArgs,\n makeCacheKey,\n normalizeResult,\n subscribe,\n cache,\n });\n function dirtyKey(key) {\n const entry = key && cache.get(key);\n if (entry) {\n entry.setDirty();\n }\n }\n optimistic.dirtyKey = dirtyKey;\n optimistic.dirty = function dirty() {\n dirtyKey(makeCacheKey.apply(null, arguments));\n };\n function peekKey(key) {\n const entry = key && cache.get(key);\n if (entry) {\n return entry.peek();\n }\n }\n optimistic.peekKey = peekKey;\n optimistic.peek = function peek() {\n return peekKey(makeCacheKey.apply(null, arguments));\n };\n function forgetKey(key) {\n return key ? cache.delete(key) : false;\n }\n optimistic.forgetKey = forgetKey;\n optimistic.forget = function forget() {\n return forgetKey(makeCacheKey.apply(null, arguments));\n };\n optimistic.makeCacheKey = makeCacheKey;\n optimistic.getKey = keyArgs ? function getKey() {\n return makeCacheKey.apply(null, keyArgs.apply(null, arguments));\n } : makeCacheKey;\n return Object.freeze(optimistic);\n}\n//# sourceMappingURL=index.js.map","import { Trie } from \"@wry/trie\";\nimport { canUseWeakMap, canUseWeakSet } from \"../common/canUse.js\";\nimport { checkDocument } from \"./getFromAST.js\";\nimport { invariant } from \"../globals/index.js\";\nimport { WeakCache } from \"@wry/caches\";\nimport { wrap } from \"optimism\";\nimport { cacheSizes } from \"../caching/index.js\";\nfunction identity(document) {\n return document;\n}\nvar DocumentTransform = /** @class */ (function () {\n function DocumentTransform(transform, options) {\n if (options === void 0) { options = Object.create(null); }\n this.resultCache = canUseWeakSet ? new WeakSet() : new Set();\n this.transform = transform;\n if (options.getCacheKey) {\n // Override default `getCacheKey` function, which returns [document].\n this.getCacheKey = options.getCacheKey;\n }\n this.cached = options.cache !== false;\n this.resetCache();\n }\n // This default implementation of getCacheKey can be overridden by providing\n // options.getCacheKey to the DocumentTransform constructor. In general, a\n // getCacheKey function may either return an array of keys (often including\n // the document) to be used as a cache key, or undefined to indicate the\n // transform for this document should not be cached.\n DocumentTransform.prototype.getCacheKey = function (document) {\n return [document];\n };\n DocumentTransform.identity = function () {\n // No need to cache this transform since it just returns the document\n // unchanged. This should save a bit of memory that would otherwise be\n // needed to populate the `documentCache` of this transform.\n return new DocumentTransform(identity, { cache: false });\n };\n DocumentTransform.split = function (predicate, left, right) {\n if (right === void 0) { right = DocumentTransform.identity(); }\n return Object.assign(new DocumentTransform(function (document) {\n var documentTransform = predicate(document) ? left : right;\n return documentTransform.transformDocument(document);\n }, \n // Reasonably assume both `left` and `right` transforms handle their own caching\n { cache: false }), { left: left, right: right });\n };\n /**\n * Resets the internal cache of this transform, if it has one.\n */\n DocumentTransform.prototype.resetCache = function () {\n var _this = this;\n if (this.cached) {\n var stableCacheKeys_1 = new Trie(canUseWeakMap);\n this.performWork = wrap(DocumentTransform.prototype.performWork.bind(this), {\n makeCacheKey: function (document) {\n var cacheKeys = _this.getCacheKey(document);\n if (cacheKeys) {\n invariant(Array.isArray(cacheKeys), 69);\n return stableCacheKeys_1.lookupArray(cacheKeys);\n }\n },\n max: cacheSizes[\"documentTransform.cache\"],\n cache: (WeakCache),\n });\n }\n };\n DocumentTransform.prototype.performWork = function (document) {\n checkDocument(document);\n return this.transform(document);\n };\n DocumentTransform.prototype.transformDocument = function (document) {\n // If a user passes an already transformed result back to this function,\n // immediately return it.\n if (this.resultCache.has(document)) {\n return document;\n }\n var transformedDocument = this.performWork(document);\n this.resultCache.add(transformedDocument);\n return transformedDocument;\n };\n DocumentTransform.prototype.concat = function (otherTransform) {\n var _this = this;\n return Object.assign(new DocumentTransform(function (document) {\n return otherTransform.transformDocument(_this.transformDocument(document));\n }, \n // Reasonably assume both transforms handle their own caching\n { cache: false }), {\n left: this,\n right: otherTransform,\n });\n };\n return DocumentTransform;\n}());\nexport { DocumentTransform };\n//# sourceMappingURL=DocumentTransform.js.map","import { Observable } from \"./Observable.js\";\n// Like Observable.prototype.map, except that the mapping function can\n// optionally return a Promise (or be async).\nexport function asyncMap(observable, mapFn, catchFn) {\n return new Observable(function (observer) {\n var promiseQueue = {\n // Normally we would initialize promiseQueue to Promise.resolve(), but\n // in this case, for backwards compatibility, we need to be careful to\n // invoke the first callback synchronously.\n then: function (callback) {\n return new Promise(function (resolve) { return resolve(callback()); });\n },\n };\n function makeCallback(examiner, key) {\n return function (arg) {\n if (examiner) {\n var both = function () {\n // If the observer is closed, we don't want to continue calling the\n // mapping function - it's result will be swallowed anyways.\n return observer.closed ?\n /* will be swallowed */ 0\n : examiner(arg);\n };\n promiseQueue = promiseQueue.then(both, both).then(function (result) { return observer.next(result); }, function (error) { return observer.error(error); });\n }\n else {\n observer[key](arg);\n }\n };\n }\n var handler = {\n next: makeCallback(mapFn, \"next\"),\n error: makeCallback(catchFn, \"error\"),\n complete: function () {\n // no need to reassign `promiseQueue`, after `observer.complete`,\n // the observer will be closed and short-circuit everything anyways\n /*promiseQueue = */ promiseQueue.then(function () { return observer.complete(); });\n },\n };\n var sub = observable.subscribe(handler);\n return function () { return sub.unsubscribe(); };\n });\n}\n//# sourceMappingURL=asyncMap.js.map","import { isNonEmptyArray } from \"./arrays.js\";\nimport { isExecutionPatchIncrementalResult } from \"./incrementalResult.js\";\nexport function graphQLResultHasError(result) {\n var errors = getGraphQLErrorsFromResult(result);\n return isNonEmptyArray(errors);\n}\nexport function getGraphQLErrorsFromResult(result) {\n var graphQLErrors = isNonEmptyArray(result.errors) ? result.errors.slice(0) : [];\n if (isExecutionPatchIncrementalResult(result) &&\n isNonEmptyArray(result.incremental)) {\n result.incremental.forEach(function (incrementalResult) {\n if (incrementalResult.errors) {\n graphQLErrors.push.apply(graphQLErrors, incrementalResult.errors);\n }\n });\n }\n return graphQLErrors;\n}\n//# sourceMappingURL=errorHandling.js.map","export function iterateObserversSafely(observers, method, argument) {\n // In case observers is modified during iteration, we need to commit to the\n // original elements, which also provides an opportunity to filter them down\n // to just the observers with the given method.\n var observersWithMethod = [];\n observers.forEach(function (obs) { return obs[method] && observersWithMethod.push(obs); });\n observersWithMethod.forEach(function (obs) { return obs[method](argument); });\n}\n//# sourceMappingURL=iteration.js.map","import { Observable } from \"./Observable.js\";\nimport { canUseSymbol } from \"../common/canUse.js\";\n// Generic implementations of Observable.prototype methods like map and\n// filter need to know how to create a new Observable from an Observable\n// subclass (like Concast or ObservableQuery). Those methods assume\n// (perhaps unwisely?) that they can call the subtype's constructor with a\n// Subscriber function, even though the subclass constructor might expect\n// different parameters. Defining this static Symbol.species property on\n// the subclass is a hint to generic Observable code to use the default\n// constructor instead of trying to do `new Subclass(observer => ...)`.\nexport function fixObservableSubclass(subclass) {\n function set(key) {\n // Object.defineProperty is necessary because the Symbol.species\n // property is a getter by default in modern JS environments, so we\n // can't assign to it with a normal assignment expression.\n Object.defineProperty(subclass, key, { value: Observable });\n }\n if (canUseSymbol && Symbol.species) {\n set(Symbol.species);\n }\n // The \"@@species\" string is used as a fake Symbol.species value in some\n // polyfill systems (including the SymbolSpecies variable used by\n // zen-observable), so we should set it as well, to be safe.\n set(\"@@species\");\n return subclass;\n}\n//# sourceMappingURL=subclassing.js.map","import { __extends } from \"tslib\";\nimport { Observable } from \"./Observable.js\";\nimport { iterateObserversSafely } from \"./iteration.js\";\nimport { fixObservableSubclass } from \"./subclassing.js\";\nfunction isPromiseLike(value) {\n return value && typeof value.then === \"function\";\n}\n// A Concast observable concatenates the given sources into a single\n// non-overlapping sequence of Ts, automatically unwrapping any promises,\n// and broadcasts the T elements of that sequence to any number of\n// subscribers, all without creating a bunch of intermediary Observable\n// wrapper objects.\n//\n// Even though any number of observers can subscribe to the Concast, each\n// source observable is guaranteed to receive at most one subscribe call,\n// and the results are multicast to all observers.\n//\n// In addition to broadcasting every next/error message to this.observers,\n// the Concast stores the most recent message using this.latest, so any\n// new observers can immediately receive the latest message, even if it\n// was originally delivered in the past. This behavior means we can assume\n// every active observer in this.observers has received the same most\n// recent message.\n//\n// With the exception of this.latest replay, a Concast is a \"hot\"\n// observable in the sense that it does not replay past results from the\n// beginning of time for each new observer.\n//\n// Could we have used some existing RxJS class instead? Concast is\n// similar to a BehaviorSubject, because it is multicast and redelivers\n// the latest next/error message to new subscribers. Unlike Subject,\n// Concast does not expose an Observer interface (this.handlers is\n// intentionally private), since Concast gets its inputs from the\n// concatenated sources. If we ever switch to RxJS, there may be some\n// value in reusing their code, but for now we use zen-observable, which\n// does not contain any Subject implementations.\nvar Concast = /** @class */ (function (_super) {\n __extends(Concast, _super);\n // Not only can the individual elements of the iterable be promises, but\n // also the iterable itself can be wrapped in a promise.\n function Concast(sources) {\n var _this = _super.call(this, function (observer) {\n _this.addObserver(observer);\n return function () { return _this.removeObserver(observer); };\n }) || this;\n // Active observers receiving broadcast messages. Thanks to this.latest,\n // we can assume all observers in this Set have received the same most\n // recent message, though possibly at different times in the past.\n _this.observers = new Set();\n _this.promise = new Promise(function (resolve, reject) {\n _this.resolve = resolve;\n _this.reject = reject;\n });\n // Bound handler functions that can be reused for every internal\n // subscription.\n _this.handlers = {\n next: function (result) {\n if (_this.sub !== null) {\n _this.latest = [\"next\", result];\n _this.notify(\"next\", result);\n iterateObserversSafely(_this.observers, \"next\", result);\n }\n },\n error: function (error) {\n var sub = _this.sub;\n if (sub !== null) {\n // Delay unsubscribing from the underlying subscription slightly,\n // so that immediately subscribing another observer can keep the\n // subscription active.\n if (sub)\n setTimeout(function () { return sub.unsubscribe(); });\n _this.sub = null;\n _this.latest = [\"error\", error];\n _this.reject(error);\n _this.notify(\"error\", error);\n iterateObserversSafely(_this.observers, \"error\", error);\n }\n },\n complete: function () {\n var _a = _this, sub = _a.sub, _b = _a.sources, sources = _b === void 0 ? [] : _b;\n if (sub !== null) {\n // If complete is called before concast.start, this.sources may be\n // undefined, so we use a default value of [] for sources. That works\n // here because it falls into the if (!value) {...} block, which\n // appropriately terminates the Concast, even if this.sources might\n // eventually have been initialized to a non-empty array.\n var value = sources.shift();\n if (!value) {\n if (sub)\n setTimeout(function () { return sub.unsubscribe(); });\n _this.sub = null;\n if (_this.latest && _this.latest[0] === \"next\") {\n _this.resolve(_this.latest[1]);\n }\n else {\n _this.resolve();\n }\n _this.notify(\"complete\");\n // We do not store this.latest = [\"complete\"], because doing so\n // discards useful information about the previous next (or\n // error) message. Instead, if new observers subscribe after\n // this Concast has completed, they will receive the final\n // 'next' message (unless there was an error) immediately\n // followed by a 'complete' message (see addObserver).\n iterateObserversSafely(_this.observers, \"complete\");\n }\n else if (isPromiseLike(value)) {\n value.then(function (obs) { return (_this.sub = obs.subscribe(_this.handlers)); }, _this.handlers.error);\n }\n else {\n _this.sub = value.subscribe(_this.handlers);\n }\n }\n },\n };\n _this.nextResultListeners = new Set();\n // A public way to abort observation and broadcast.\n _this.cancel = function (reason) {\n _this.reject(reason);\n _this.sources = [];\n _this.handlers.error(reason);\n };\n // Suppress rejection warnings for this.promise, since it's perfectly\n // acceptable to pay no attention to this.promise if you're consuming\n // the results through the normal observable API.\n _this.promise.catch(function (_) { });\n // If someone accidentally tries to create a Concast using a subscriber\n // function, recover by creating an Observable from that subscriber and\n // using it as the source.\n if (typeof sources === \"function\") {\n sources = [new Observable(sources)];\n }\n if (isPromiseLike(sources)) {\n sources.then(function (iterable) { return _this.start(iterable); }, _this.handlers.error);\n }\n else {\n _this.start(sources);\n }\n return _this;\n }\n Concast.prototype.start = function (sources) {\n if (this.sub !== void 0)\n return;\n // In practice, sources is most often simply an Array of observables.\n // TODO Consider using sources[Symbol.iterator]() to take advantage\n // of the laziness of non-Array iterables.\n this.sources = Array.from(sources);\n // Calling this.handlers.complete() kicks off consumption of the first\n // source observable. It's tempting to do this step lazily in\n // addObserver, but this.promise can be accessed without calling\n // addObserver, so consumption needs to begin eagerly.\n this.handlers.complete();\n };\n Concast.prototype.deliverLastMessage = function (observer) {\n if (this.latest) {\n var nextOrError = this.latest[0];\n var method = observer[nextOrError];\n if (method) {\n method.call(observer, this.latest[1]);\n }\n // If the subscription is already closed, and the last message was\n // a 'next' message, simulate delivery of the final 'complete'\n // message again.\n if (this.sub === null && nextOrError === \"next\" && observer.complete) {\n observer.complete();\n }\n }\n };\n Concast.prototype.addObserver = function (observer) {\n if (!this.observers.has(observer)) {\n // Immediately deliver the most recent message, so we can always\n // be sure all observers have the latest information.\n this.deliverLastMessage(observer);\n this.observers.add(observer);\n }\n };\n Concast.prototype.removeObserver = function (observer) {\n if (this.observers.delete(observer) && this.observers.size < 1) {\n // In case there are still any listeners in this.nextResultListeners, and\n // no error or completion has been broadcast yet, make sure those\n // observers have a chance to run and then remove themselves from\n // this.observers.\n this.handlers.complete();\n }\n };\n Concast.prototype.notify = function (method, arg) {\n var nextResultListeners = this.nextResultListeners;\n if (nextResultListeners.size) {\n // Replacing this.nextResultListeners first ensures it does not grow while\n // we are iterating over it, potentially leading to infinite loops.\n this.nextResultListeners = new Set();\n nextResultListeners.forEach(function (listener) { return listener(method, arg); });\n }\n };\n // We need a way to run callbacks just *before* the next result (or error or\n // completion) is delivered by this Concast, so we can be sure any code that\n // runs as a result of delivering that result/error observes the effects of\n // running the callback(s). It was tempting to reuse the Observer type instead\n // of introducing NextResultListener, but that messes with the sizing and\n // maintenance of this.observers, and ends up being more code overall.\n Concast.prototype.beforeNext = function (callback) {\n var called = false;\n this.nextResultListeners.add(function (method, arg) {\n if (!called) {\n called = true;\n callback(method, arg);\n }\n });\n };\n return Concast;\n}(Observable));\nexport { Concast };\n// Necessary because the Concast constructor has a different signature\n// than the Observable constructor.\nfixObservableSubclass(Concast);\n//# sourceMappingURL=Concast.js.map","var toString = Object.prototype.toString;\n/**\n * Deeply clones a value to create a new instance.\n */\nexport function cloneDeep(value) {\n return cloneDeepHelper(value);\n}\nfunction cloneDeepHelper(val, seen) {\n switch (toString.call(val)) {\n case \"[object Array]\": {\n seen = seen || new Map();\n if (seen.has(val))\n return seen.get(val);\n var copy_1 = val.slice(0);\n seen.set(val, copy_1);\n copy_1.forEach(function (child, i) {\n copy_1[i] = cloneDeepHelper(child, seen);\n });\n return copy_1;\n }\n case \"[object Object]\": {\n seen = seen || new Map();\n if (seen.has(val))\n return seen.get(val);\n // High fidelity polyfills of Object.create and Object.getPrototypeOf are\n // possible in all JS environments, so we will assume they exist/work.\n var copy_2 = Object.create(Object.getPrototypeOf(val));\n seen.set(val, copy_2);\n Object.keys(val).forEach(function (key) {\n copy_2[key] = cloneDeepHelper(val[key], seen);\n });\n return copy_2;\n }\n default:\n return val;\n }\n}\n//# sourceMappingURL=cloneDeep.js.map","import { __rest } from \"tslib\";\nimport equal from \"@wry/equality\";\nimport { createFragmentMap, getFragmentDefinitions, getFragmentFromSelection, getMainDefinition, isField, resultKeyNameFromField, shouldInclude, } from \"../utilities/index.js\";\n// Returns true if aResult and bResult are deeply equal according to the fields\n// selected by the given query, ignoring any fields marked as @nonreactive.\nexport function equalByQuery(query, _a, _b, variables) {\n var aData = _a.data, aRest = __rest(_a, [\"data\"]);\n var bData = _b.data, bRest = __rest(_b, [\"data\"]);\n return (equal(aRest, bRest) &&\n equalBySelectionSet(getMainDefinition(query).selectionSet, aData, bData, {\n fragmentMap: createFragmentMap(getFragmentDefinitions(query)),\n variables: variables,\n }));\n}\nfunction equalBySelectionSet(selectionSet, aResult, bResult, context) {\n if (aResult === bResult) {\n return true;\n }\n var seenSelections = new Set();\n // Returning true from this Array.prototype.every callback function skips the\n // current field/subtree. Returning false aborts the entire traversal\n // immediately, causing equalBySelectionSet to return false.\n return selectionSet.selections.every(function (selection) {\n // Avoid re-processing the same selection at the same level of recursion, in\n // case the same field gets included via multiple indirect fragment spreads.\n if (seenSelections.has(selection))\n return true;\n seenSelections.add(selection);\n // Ignore @skip(if: true) and @include(if: false) fields.\n if (!shouldInclude(selection, context.variables))\n return true;\n // If the field or (named) fragment spread has a @nonreactive directive on\n // it, we don't care if it's different, so we pretend it's the same.\n if (selectionHasNonreactiveDirective(selection))\n return true;\n if (isField(selection)) {\n var resultKey = resultKeyNameFromField(selection);\n var aResultChild = aResult && aResult[resultKey];\n var bResultChild = bResult && bResult[resultKey];\n var childSelectionSet = selection.selectionSet;\n if (!childSelectionSet) {\n // These are scalar values, so we can compare them with deep equal\n // without redoing the main recursive work.\n return equal(aResultChild, bResultChild);\n }\n var aChildIsArray = Array.isArray(aResultChild);\n var bChildIsArray = Array.isArray(bResultChild);\n if (aChildIsArray !== bChildIsArray)\n return false;\n if (aChildIsArray && bChildIsArray) {\n var length_1 = aResultChild.length;\n if (bResultChild.length !== length_1) {\n return false;\n }\n for (var i = 0; i < length_1; ++i) {\n if (!equalBySelectionSet(childSelectionSet, aResultChild[i], bResultChild[i], context)) {\n return false;\n }\n }\n return true;\n }\n return equalBySelectionSet(childSelectionSet, aResultChild, bResultChild, context);\n }\n else {\n var fragment = getFragmentFromSelection(selection, context.fragmentMap);\n if (fragment) {\n // The fragment might === selection if it's an inline fragment, but\n // could be !== if it's a named fragment ...spread.\n if (selectionHasNonreactiveDirective(fragment))\n return true;\n return equalBySelectionSet(fragment.selectionSet, \n // Notice that we reuse the same aResult and bResult values here,\n // since the fragment ...spread does not specify a field name, but\n // consists of multiple fields (within the fragment's selection set)\n // that should be applied to the current result value(s).\n aResult, bResult, context);\n }\n }\n });\n}\nfunction selectionHasNonreactiveDirective(selection) {\n return (!!selection.directives && selection.directives.some(directiveIsNonreactive));\n}\nfunction directiveIsNonreactive(dir) {\n return dir.name.value === \"nonreactive\";\n}\n//# sourceMappingURL=equalByQuery.js.map","import { __assign, __extends } from \"tslib\";\nimport { invariant } from \"../utilities/globals/index.js\";\nimport { equal } from \"@wry/equality\";\nimport { NetworkStatus, isNetworkRequestInFlight } from \"./networkStatus.js\";\nimport { cloneDeep, compact, getOperationDefinition, Observable, iterateObserversSafely, fixObservableSubclass, getQueryDefinition, } from \"../utilities/index.js\";\nimport { ApolloError, isApolloError } from \"../errors/index.js\";\nimport { equalByQuery } from \"./equalByQuery.js\";\nvar assign = Object.assign, hasOwnProperty = Object.hasOwnProperty;\nvar ObservableQuery = /** @class */ (function (_super) {\n __extends(ObservableQuery, _super);\n function ObservableQuery(_a) {\n var queryManager = _a.queryManager, queryInfo = _a.queryInfo, options = _a.options;\n var _this = _super.call(this, function (observer) {\n // Zen Observable has its own error function, so in order to log correctly\n // we need to provide a custom error callback.\n try {\n var subObserver = observer._subscription._observer;\n if (subObserver && !subObserver.error) {\n subObserver.error = defaultSubscriptionObserverErrorCallback;\n }\n }\n catch (_a) { }\n var first = !_this.observers.size;\n _this.observers.add(observer);\n // Deliver most recent error or result.\n var last = _this.last;\n if (last && last.error) {\n observer.error && observer.error(last.error);\n }\n else if (last && last.result) {\n observer.next && observer.next(last.result);\n }\n // Initiate observation of this query if it hasn't been reported to\n // the QueryManager yet.\n if (first) {\n // Blindly catching here prevents unhandled promise rejections,\n // and is safe because the ObservableQuery handles this error with\n // this.observer.error, so we're not just swallowing the error by\n // ignoring it here.\n _this.reobserve().catch(function () { });\n }\n return function () {\n if (_this.observers.delete(observer) && !_this.observers.size) {\n _this.tearDownQuery();\n }\n };\n }) || this;\n _this.observers = new Set();\n _this.subscriptions = new Set();\n // related classes\n _this.queryInfo = queryInfo;\n _this.queryManager = queryManager;\n // active state\n _this.waitForOwnResult = skipCacheDataFor(options.fetchPolicy);\n _this.isTornDown = false;\n _this.subscribeToMore = _this.subscribeToMore.bind(_this);\n var _b = queryManager.defaultOptions.watchQuery, _c = _b === void 0 ? {} : _b, _d = _c.fetchPolicy, defaultFetchPolicy = _d === void 0 ? \"cache-first\" : _d;\n var _e = options.fetchPolicy, fetchPolicy = _e === void 0 ? defaultFetchPolicy : _e, \n // Make sure we don't store \"standby\" as the initialFetchPolicy.\n _f = options.initialFetchPolicy, \n // Make sure we don't store \"standby\" as the initialFetchPolicy.\n initialFetchPolicy = _f === void 0 ? fetchPolicy === \"standby\" ? defaultFetchPolicy : (fetchPolicy) : _f;\n _this.options = __assign(__assign({}, options), { \n // Remember the initial options.fetchPolicy so we can revert back to this\n // policy when variables change. This information can also be specified\n // (or overridden) by providing options.initialFetchPolicy explicitly.\n initialFetchPolicy: initialFetchPolicy, \n // This ensures this.options.fetchPolicy always has a string value, in\n // case options.fetchPolicy was not provided.\n fetchPolicy: fetchPolicy });\n _this.queryId = queryInfo.queryId || queryManager.generateQueryId();\n var opDef = getOperationDefinition(_this.query);\n _this.queryName = opDef && opDef.name && opDef.name.value;\n return _this;\n }\n Object.defineProperty(ObservableQuery.prototype, \"query\", {\n // The `query` computed property will always reflect the document transformed\n // by the last run query. `this.options.query` will always reflect the raw\n // untransformed query to ensure document transforms with runtime conditionals\n // are run on the original document.\n get: function () {\n return this.lastQuery || this.options.query;\n },\n enumerable: false,\n configurable: true\n });\n Object.defineProperty(ObservableQuery.prototype, \"variables\", {\n // Computed shorthand for this.options.variables, preserved for\n // backwards compatibility.\n /**\n * An object containing the variables that were provided for the query.\n */\n get: function () {\n return this.options.variables;\n },\n enumerable: false,\n configurable: true\n });\n ObservableQuery.prototype.result = function () {\n var _this = this;\n return new Promise(function (resolve, reject) {\n // TODO: this code doesn’t actually make sense insofar as the observer\n // will never exist in this.observers due how zen-observable wraps observables.\n // https://github.com/zenparsing/zen-observable/blob/master/src/Observable.js#L169\n var observer = {\n next: function (result) {\n resolve(result);\n // Stop the query within the QueryManager if we can before\n // this function returns.\n //\n // We do this in order to prevent observers piling up within\n // the QueryManager. Notice that we only fully unsubscribe\n // from the subscription in a setTimeout(..., 0) call. This call can\n // actually be handled by the browser at a much later time. If queries\n // are fired in the meantime, observers that should have been removed\n // from the QueryManager will continue to fire, causing an unnecessary\n // performance hit.\n _this.observers.delete(observer);\n if (!_this.observers.size) {\n _this.queryManager.removeQuery(_this.queryId);\n }\n setTimeout(function () {\n subscription.unsubscribe();\n }, 0);\n },\n error: reject,\n };\n var subscription = _this.subscribe(observer);\n });\n };\n /** @internal */\n ObservableQuery.prototype.resetDiff = function () {\n this.queryInfo.resetDiff();\n };\n ObservableQuery.prototype.getCurrentResult = function (saveAsLastResult) {\n if (saveAsLastResult === void 0) { saveAsLastResult = true; }\n // Use the last result as long as the variables match this.variables.\n var lastResult = this.getLastResult(true);\n var networkStatus = this.queryInfo.networkStatus ||\n (lastResult && lastResult.networkStatus) ||\n NetworkStatus.ready;\n var result = __assign(__assign({}, lastResult), { loading: isNetworkRequestInFlight(networkStatus), networkStatus: networkStatus });\n var _a = this.options.fetchPolicy, fetchPolicy = _a === void 0 ? \"cache-first\" : _a;\n if (\n // These fetch policies should never deliver data from the cache, unless\n // redelivering a previously delivered result.\n skipCacheDataFor(fetchPolicy) ||\n // If this.options.query has @client(always: true) fields, we cannot\n // trust diff.result, since it was read from the cache without running\n // local resolvers (and it's too late to run resolvers now, since we must\n // return a result synchronously).\n this.queryManager.getDocumentInfo(this.query).hasForcedResolvers) {\n // Fall through.\n }\n else if (this.waitForOwnResult) {\n // This would usually be a part of `QueryInfo.getDiff()`.\n // which we skip in the waitForOwnResult case since we are not\n // interested in the diff.\n this.queryInfo[\"updateWatch\"]();\n }\n else {\n var diff = this.queryInfo.getDiff();\n if (diff.complete || this.options.returnPartialData) {\n result.data = diff.result;\n }\n if (equal(result.data, {})) {\n result.data = void 0;\n }\n if (diff.complete) {\n // Similar to setting result.partial to false, but taking advantage of the\n // falsiness of missing fields.\n delete result.partial;\n // If the diff is complete, and we're using a FetchPolicy that\n // terminates after a complete cache read, we can assume the next result\n // we receive will have NetworkStatus.ready and !loading.\n if (diff.complete &&\n result.networkStatus === NetworkStatus.loading &&\n (fetchPolicy === \"cache-first\" || fetchPolicy === \"cache-only\")) {\n result.networkStatus = NetworkStatus.ready;\n result.loading = false;\n }\n }\n else {\n result.partial = true;\n }\n if (globalThis.__DEV__ !== false &&\n !diff.complete &&\n !this.options.partialRefetch &&\n !result.loading &&\n !result.data &&\n !result.error) {\n logMissingFieldErrors(diff.missing);\n }\n }\n if (saveAsLastResult) {\n this.updateLastResult(result);\n }\n return result;\n };\n // Compares newResult to the snapshot we took of this.lastResult when it was\n // first received.\n ObservableQuery.prototype.isDifferentFromLastResult = function (newResult, variables) {\n if (!this.last) {\n return true;\n }\n var resultIsDifferent = this.queryManager.getDocumentInfo(this.query).hasNonreactiveDirective ?\n !equalByQuery(this.query, this.last.result, newResult, this.variables)\n : !equal(this.last.result, newResult);\n return (resultIsDifferent || (variables && !equal(this.last.variables, variables)));\n };\n ObservableQuery.prototype.getLast = function (key, variablesMustMatch) {\n var last = this.last;\n if (last &&\n last[key] &&\n (!variablesMustMatch || equal(last.variables, this.variables))) {\n return last[key];\n }\n };\n ObservableQuery.prototype.getLastResult = function (variablesMustMatch) {\n return this.getLast(\"result\", variablesMustMatch);\n };\n ObservableQuery.prototype.getLastError = function (variablesMustMatch) {\n return this.getLast(\"error\", variablesMustMatch);\n };\n ObservableQuery.prototype.resetLastResults = function () {\n delete this.last;\n this.isTornDown = false;\n };\n ObservableQuery.prototype.resetQueryStoreErrors = function () {\n this.queryManager.resetErrors(this.queryId);\n };\n /**\n * Update the variables of this observable query, and fetch the new results.\n * This method should be preferred over `setVariables` in most use cases.\n *\n * @param variables - The new set of variables. If there are missing variables,\n * the previous values of those variables will be used.\n */\n ObservableQuery.prototype.refetch = function (variables) {\n var _a;\n var reobserveOptions = {\n // Always disable polling for refetches.\n pollInterval: 0,\n };\n // Unless the provided fetchPolicy always consults the network\n // (no-cache, network-only, or cache-and-network), override it with\n // network-only to force the refetch for this fetchQuery call.\n var fetchPolicy = this.options.fetchPolicy;\n if (fetchPolicy === \"cache-and-network\") {\n reobserveOptions.fetchPolicy = fetchPolicy;\n }\n else if (fetchPolicy === \"no-cache\") {\n reobserveOptions.fetchPolicy = \"no-cache\";\n }\n else {\n reobserveOptions.fetchPolicy = \"network-only\";\n }\n if (globalThis.__DEV__ !== false && variables && hasOwnProperty.call(variables, \"variables\")) {\n var queryDef = getQueryDefinition(this.query);\n var vars = queryDef.variableDefinitions;\n if (!vars || !vars.some(function (v) { return v.variable.name.value === \"variables\"; })) {\n globalThis.__DEV__ !== false && invariant.warn(\n 20,\n variables,\n ((_a = queryDef.name) === null || _a === void 0 ? void 0 : _a.value) || queryDef\n );\n }\n }\n if (variables && !equal(this.options.variables, variables)) {\n // Update the existing options with new variables\n reobserveOptions.variables = this.options.variables = __assign(__assign({}, this.options.variables), variables);\n }\n this.queryInfo.resetLastWrite();\n return this.reobserve(reobserveOptions, NetworkStatus.refetch);\n };\n /**\n * A function that helps you fetch the next set of results for a [paginated list field](https://www.apollographql.com/docs/react/pagination/core-api/).\n */\n ObservableQuery.prototype.fetchMore = function (fetchMoreOptions) {\n var _this = this;\n var combinedOptions = __assign(__assign({}, (fetchMoreOptions.query ? fetchMoreOptions : (__assign(__assign(__assign(__assign({}, this.options), { query: this.options.query }), fetchMoreOptions), { variables: __assign(__assign({}, this.options.variables), fetchMoreOptions.variables) })))), { \n // The fetchMore request goes immediately to the network and does\n // not automatically write its result to the cache (hence no-cache\n // instead of network-only), because we allow the caller of\n // fetchMore to provide an updateQuery callback that determines how\n // the data gets written to the cache.\n fetchPolicy: \"no-cache\" });\n combinedOptions.query = this.transformDocument(combinedOptions.query);\n var qid = this.queryManager.generateQueryId();\n // If a temporary query is passed to `fetchMore`, we don't want to store\n // it as the last query result since it may be an optimized query for\n // pagination. We will however run the transforms on the original document\n // as well as the document passed in `fetchMoreOptions` to ensure the cache\n // uses the most up-to-date document which may rely on runtime conditionals.\n this.lastQuery =\n fetchMoreOptions.query ?\n this.transformDocument(this.options.query)\n : combinedOptions.query;\n // Simulate a loading result for the original query with\n // result.networkStatus === NetworkStatus.fetchMore.\n var queryInfo = this.queryInfo;\n var originalNetworkStatus = queryInfo.networkStatus;\n queryInfo.networkStatus = NetworkStatus.fetchMore;\n if (combinedOptions.notifyOnNetworkStatusChange) {\n this.observe();\n }\n var updatedQuerySet = new Set();\n var updateQuery = fetchMoreOptions === null || fetchMoreOptions === void 0 ? void 0 : fetchMoreOptions.updateQuery;\n var isCached = this.options.fetchPolicy !== \"no-cache\";\n if (!isCached) {\n invariant(updateQuery, 21);\n }\n return this.queryManager\n .fetchQuery(qid, combinedOptions, NetworkStatus.fetchMore)\n .then(function (fetchMoreResult) {\n _this.queryManager.removeQuery(qid);\n if (queryInfo.networkStatus === NetworkStatus.fetchMore) {\n queryInfo.networkStatus = originalNetworkStatus;\n }\n if (isCached) {\n // Performing this cache update inside a cache.batch transaction ensures\n // any affected cache.watch watchers are notified at most once about any\n // updates. Most watchers will be using the QueryInfo class, which\n // responds to notifications by calling reobserveCacheFirst to deliver\n // fetchMore cache results back to this ObservableQuery.\n _this.queryManager.cache.batch({\n update: function (cache) {\n var updateQuery = fetchMoreOptions.updateQuery;\n if (updateQuery) {\n cache.updateQuery({\n query: _this.query,\n variables: _this.variables,\n returnPartialData: true,\n optimistic: false,\n }, function (previous) {\n return updateQuery(previous, {\n fetchMoreResult: fetchMoreResult.data,\n variables: combinedOptions.variables,\n });\n });\n }\n else {\n // If we're using a field policy instead of updateQuery, the only\n // thing we need to do is write the new data to the cache using\n // combinedOptions.variables (instead of this.variables, which is\n // what this.updateQuery uses, because it works by abusing the\n // original field value, keyed by the original variables).\n cache.writeQuery({\n query: combinedOptions.query,\n variables: combinedOptions.variables,\n data: fetchMoreResult.data,\n });\n }\n },\n onWatchUpdated: function (watch) {\n // Record the DocumentNode associated with any watched query whose\n // data were updated by the cache writes above.\n updatedQuerySet.add(watch.query);\n },\n });\n }\n else {\n // There is a possibility `lastResult` may not be set when\n // `fetchMore` is called which would cause this to crash. This should\n // only happen if we haven't previously reported a result. We don't\n // quite know what the right behavior should be here since this block\n // of code runs after the fetch result has executed on the network.\n // We plan to let it crash in the meantime.\n //\n // If we get bug reports due to the `data` property access on\n // undefined, this should give us a real-world scenario that we can\n // use to test against and determine the right behavior. If we do end\n // up changing this behavior, this may require, for example, an\n // adjustment to the types on `updateQuery` since that function\n // expects that the first argument always contains previous result\n // data, but not `undefined`.\n var lastResult = _this.getLast(\"result\");\n var data = updateQuery(lastResult.data, {\n fetchMoreResult: fetchMoreResult.data,\n variables: combinedOptions.variables,\n });\n _this.reportResult(__assign(__assign({}, lastResult), { data: data }), _this.variables);\n }\n return fetchMoreResult;\n })\n .finally(function () {\n // In case the cache writes above did not generate a broadcast\n // notification (which would have been intercepted by onWatchUpdated),\n // likely because the written data were the same as what was already in\n // the cache, we still want fetchMore to deliver its final loading:false\n // result with the unchanged data.\n if (isCached && !updatedQuerySet.has(_this.query)) {\n reobserveCacheFirst(_this);\n }\n });\n };\n // XXX the subscription variables are separate from the query variables.\n // if you want to update subscription variables, right now you have to do that separately,\n // and you can only do it by stopping the subscription and then subscribing again with new variables.\n /**\n * A function that enables you to execute a [subscription](https://www.apollographql.com/docs/react/data/subscriptions/), usually to subscribe to specific fields that were included in the query.\n *\n * This function returns _another_ function that you can call to terminate the subscription.\n */\n ObservableQuery.prototype.subscribeToMore = function (options) {\n var _this = this;\n var subscription = this.queryManager\n .startGraphQLSubscription({\n query: options.document,\n variables: options.variables,\n context: options.context,\n })\n .subscribe({\n next: function (subscriptionData) {\n var updateQuery = options.updateQuery;\n if (updateQuery) {\n _this.updateQuery(function (previous, _a) {\n var variables = _a.variables;\n return updateQuery(previous, {\n subscriptionData: subscriptionData,\n variables: variables,\n });\n });\n }\n },\n error: function (err) {\n if (options.onError) {\n options.onError(err);\n return;\n }\n globalThis.__DEV__ !== false && invariant.error(22, err);\n },\n });\n this.subscriptions.add(subscription);\n return function () {\n if (_this.subscriptions.delete(subscription)) {\n subscription.unsubscribe();\n }\n };\n };\n ObservableQuery.prototype.setOptions = function (newOptions) {\n return this.reobserve(newOptions);\n };\n ObservableQuery.prototype.silentSetOptions = function (newOptions) {\n var mergedOptions = compact(this.options, newOptions || {});\n assign(this.options, mergedOptions);\n };\n /**\n * Update the variables of this observable query, and fetch the new results\n * if they've changed. Most users should prefer `refetch` instead of\n * `setVariables` in order to to be properly notified of results even when\n * they come from the cache.\n *\n * Note: the `next` callback will *not* fire if the variables have not changed\n * or if the result is coming from cache.\n *\n * Note: the promise will return the old results immediately if the variables\n * have not changed.\n *\n * Note: the promise will return null immediately if the query is not active\n * (there are no subscribers).\n *\n * @param variables - The new set of variables. If there are missing variables,\n * the previous values of those variables will be used.\n */\n ObservableQuery.prototype.setVariables = function (variables) {\n if (equal(this.variables, variables)) {\n // If we have no observers, then we don't actually want to make a network\n // request. As soon as someone observes the query, the request will kick\n // off. For now, we just store any changes. (See #1077)\n return this.observers.size ? this.result() : Promise.resolve();\n }\n this.options.variables = variables;\n // See comment above\n if (!this.observers.size) {\n return Promise.resolve();\n }\n return this.reobserve({\n // Reset options.fetchPolicy to its original value.\n fetchPolicy: this.options.initialFetchPolicy,\n variables: variables,\n }, NetworkStatus.setVariables);\n };\n /**\n * A function that enables you to update the query's cached result without executing a followup GraphQL operation.\n *\n * See [using updateQuery and updateFragment](https://www.apollographql.com/docs/react/caching/cache-interaction/#using-updatequery-and-updatefragment) for additional information.\n */\n ObservableQuery.prototype.updateQuery = function (mapFn) {\n var queryManager = this.queryManager;\n var result = queryManager.cache.diff({\n query: this.options.query,\n variables: this.variables,\n returnPartialData: true,\n optimistic: false,\n }).result;\n var newResult = mapFn(result, {\n variables: this.variables,\n });\n if (newResult) {\n queryManager.cache.writeQuery({\n query: this.options.query,\n data: newResult,\n variables: this.variables,\n });\n queryManager.broadcastQueries();\n }\n };\n /**\n * A function that instructs the query to begin re-executing at a specified interval (in milliseconds).\n */\n ObservableQuery.prototype.startPolling = function (pollInterval) {\n this.options.pollInterval = pollInterval;\n this.updatePolling();\n };\n /**\n * A function that instructs the query to stop polling after a previous call to `startPolling`.\n */\n ObservableQuery.prototype.stopPolling = function () {\n this.options.pollInterval = 0;\n this.updatePolling();\n };\n // Update options.fetchPolicy according to options.nextFetchPolicy.\n ObservableQuery.prototype.applyNextFetchPolicy = function (reason, \n // It's possible to use this method to apply options.nextFetchPolicy to\n // options.fetchPolicy even if options !== this.options, though that happens\n // most often when the options are temporary, used for only one request and\n // then thrown away, so nextFetchPolicy may not end up mattering.\n options) {\n if (options.nextFetchPolicy) {\n var _a = options.fetchPolicy, fetchPolicy = _a === void 0 ? \"cache-first\" : _a, _b = options.initialFetchPolicy, initialFetchPolicy = _b === void 0 ? fetchPolicy : _b;\n if (fetchPolicy === \"standby\") {\n // Do nothing, leaving options.fetchPolicy unchanged.\n }\n else if (typeof options.nextFetchPolicy === \"function\") {\n // When someone chooses \"cache-and-network\" or \"network-only\" as their\n // initial FetchPolicy, they often do not want future cache updates to\n // trigger unconditional network requests, which is what repeatedly\n // applying the \"cache-and-network\" or \"network-only\" policies would\n // seem to imply. Instead, when the cache reports an update after the\n // initial network request, it may be desirable for subsequent network\n // requests to be triggered only if the cache result is incomplete. To\n // that end, the options.nextFetchPolicy option provides an easy way to\n // update options.fetchPolicy after the initial network request, without\n // having to call observableQuery.setOptions.\n options.fetchPolicy = options.nextFetchPolicy(fetchPolicy, {\n reason: reason,\n options: options,\n observable: this,\n initialFetchPolicy: initialFetchPolicy,\n });\n }\n else if (reason === \"variables-changed\") {\n options.fetchPolicy = initialFetchPolicy;\n }\n else {\n options.fetchPolicy = options.nextFetchPolicy;\n }\n }\n return options.fetchPolicy;\n };\n ObservableQuery.prototype.fetch = function (options, newNetworkStatus, query) {\n // TODO Make sure we update the networkStatus (and infer fetchVariables)\n // before actually committing to the fetch.\n this.queryManager.setObservableQuery(this);\n return this.queryManager[\"fetchConcastWithInfo\"](this.queryId, options, newNetworkStatus, query);\n };\n // Turns polling on or off based on this.options.pollInterval.\n ObservableQuery.prototype.updatePolling = function () {\n var _this = this;\n // Avoid polling in SSR mode\n if (this.queryManager.ssrMode) {\n return;\n }\n var _a = this, pollingInfo = _a.pollingInfo, pollInterval = _a.options.pollInterval;\n if (!pollInterval || !this.hasObservers()) {\n if (pollingInfo) {\n clearTimeout(pollingInfo.timeout);\n delete this.pollingInfo;\n }\n return;\n }\n if (pollingInfo && pollingInfo.interval === pollInterval) {\n return;\n }\n invariant(pollInterval, 23);\n var info = pollingInfo || (this.pollingInfo = {});\n info.interval = pollInterval;\n var maybeFetch = function () {\n var _a, _b;\n if (_this.pollingInfo) {\n if (!isNetworkRequestInFlight(_this.queryInfo.networkStatus) &&\n !((_b = (_a = _this.options).skipPollAttempt) === null || _b === void 0 ? void 0 : _b.call(_a))) {\n _this.reobserve({\n // Most fetchPolicy options don't make sense to use in a polling context, as\n // users wouldn't want to be polling the cache directly. However, network-only and\n // no-cache are both useful for when the user wants to control whether or not the\n // polled results are written to the cache.\n fetchPolicy: _this.options.initialFetchPolicy === \"no-cache\" ?\n \"no-cache\"\n : \"network-only\",\n }, NetworkStatus.poll).then(poll, poll);\n }\n else {\n poll();\n }\n }\n };\n var poll = function () {\n var info = _this.pollingInfo;\n if (info) {\n clearTimeout(info.timeout);\n info.timeout = setTimeout(maybeFetch, info.interval);\n }\n };\n poll();\n };\n ObservableQuery.prototype.updateLastResult = function (newResult, variables) {\n if (variables === void 0) { variables = this.variables; }\n var error = this.getLastError();\n // Preserve this.last.error unless the variables have changed.\n if (error && this.last && !equal(variables, this.last.variables)) {\n error = void 0;\n }\n return (this.last = __assign({ result: this.queryManager.assumeImmutableResults ?\n newResult\n : cloneDeep(newResult), variables: variables }, (error ? { error: error } : null)));\n };\n ObservableQuery.prototype.reobserveAsConcast = function (newOptions, newNetworkStatus) {\n var _this = this;\n this.isTornDown = false;\n var useDisposableConcast = \n // Refetching uses a disposable Concast to allow refetches using different\n // options/variables, without permanently altering the options of the\n // original ObservableQuery.\n newNetworkStatus === NetworkStatus.refetch ||\n // The fetchMore method does not actually call the reobserve method, but,\n // if it did, it would definitely use a disposable Concast.\n newNetworkStatus === NetworkStatus.fetchMore ||\n // Polling uses a disposable Concast so the polling options (which force\n // fetchPolicy to be \"network-only\" or \"no-cache\") won't override the original options.\n newNetworkStatus === NetworkStatus.poll;\n // Save the old variables, since Object.assign may modify them below.\n var oldVariables = this.options.variables;\n var oldFetchPolicy = this.options.fetchPolicy;\n var mergedOptions = compact(this.options, newOptions || {});\n var options = useDisposableConcast ?\n // Disposable Concast fetches receive a shallow copy of this.options\n // (merged with newOptions), leaving this.options unmodified.\n mergedOptions\n : assign(this.options, mergedOptions);\n // Don't update options.query with the transformed query to avoid\n // overwriting this.options.query when we aren't using a disposable concast.\n // We want to ensure we can re-run the custom document transforms the next\n // time a request is made against the original query.\n var query = this.transformDocument(options.query);\n this.lastQuery = query;\n if (!useDisposableConcast) {\n // We can skip calling updatePolling if we're not changing this.options.\n this.updatePolling();\n // Reset options.fetchPolicy to its original value when variables change,\n // unless a new fetchPolicy was provided by newOptions.\n if (newOptions &&\n newOptions.variables &&\n !equal(newOptions.variables, oldVariables) &&\n // Don't mess with the fetchPolicy if it's currently \"standby\".\n options.fetchPolicy !== \"standby\" &&\n // If we're changing the fetchPolicy anyway, don't try to change it here\n // using applyNextFetchPolicy. The explicit options.fetchPolicy wins.\n (options.fetchPolicy === oldFetchPolicy ||\n // A `nextFetchPolicy` function has even higher priority, though,\n // so in that case `applyNextFetchPolicy` must be called.\n typeof options.nextFetchPolicy === \"function\")) {\n this.applyNextFetchPolicy(\"variables-changed\", options);\n if (newNetworkStatus === void 0) {\n newNetworkStatus = NetworkStatus.setVariables;\n }\n }\n }\n this.waitForOwnResult && (this.waitForOwnResult = skipCacheDataFor(options.fetchPolicy));\n var finishWaitingForOwnResult = function () {\n if (_this.concast === concast) {\n _this.waitForOwnResult = false;\n }\n };\n var variables = options.variables && __assign({}, options.variables);\n var _a = this.fetch(options, newNetworkStatus, query), concast = _a.concast, fromLink = _a.fromLink;\n var observer = {\n next: function (result) {\n if (equal(_this.variables, variables)) {\n finishWaitingForOwnResult();\n _this.reportResult(result, variables);\n }\n },\n error: function (error) {\n if (equal(_this.variables, variables)) {\n // Coming from `getResultsFromLink`, `error` here should always be an `ApolloError`.\n // However, calling `concast.cancel` can inject another type of error, so we have to\n // wrap it again here.\n if (!isApolloError(error)) {\n error = new ApolloError({ networkError: error });\n }\n finishWaitingForOwnResult();\n _this.reportError(error, variables);\n }\n },\n };\n if (!useDisposableConcast && (fromLink || !this.concast)) {\n // We use the {add,remove}Observer methods directly to avoid wrapping\n // observer with an unnecessary SubscriptionObserver object.\n if (this.concast && this.observer) {\n this.concast.removeObserver(this.observer);\n }\n this.concast = concast;\n this.observer = observer;\n }\n concast.addObserver(observer);\n return concast;\n };\n ObservableQuery.prototype.reobserve = function (newOptions, newNetworkStatus) {\n return this.reobserveAsConcast(newOptions, newNetworkStatus)\n .promise;\n };\n ObservableQuery.prototype.resubscribeAfterError = function () {\n var args = [];\n for (var _i = 0; _i < arguments.length; _i++) {\n args[_i] = arguments[_i];\n }\n // If `lastError` is set in the current when the subscription is re-created,\n // the subscription will immediately receive the error, which will\n // cause it to terminate again. To avoid this, we first clear\n // the last error/result from the `observableQuery` before re-starting\n // the subscription, and restore the last value afterwards so that the\n // subscription has a chance to stay open.\n var last = this.last;\n this.resetLastResults();\n var subscription = this.subscribe.apply(this, args);\n this.last = last;\n return subscription;\n };\n // (Re)deliver the current result to this.observers without applying fetch\n // policies or making network requests.\n ObservableQuery.prototype.observe = function () {\n this.reportResult(\n // Passing false is important so that this.getCurrentResult doesn't\n // save the fetchMore result as this.lastResult, causing it to be\n // ignored due to the this.isDifferentFromLastResult check in\n // this.reportResult.\n this.getCurrentResult(false), this.variables);\n };\n ObservableQuery.prototype.reportResult = function (result, variables) {\n var lastError = this.getLastError();\n var isDifferent = this.isDifferentFromLastResult(result, variables);\n // Update the last result even when isDifferentFromLastResult returns false,\n // because the query may be using the @nonreactive directive, and we want to\n // save the the latest version of any nonreactive subtrees (in case\n // getCurrentResult is called), even though we skip broadcasting changes.\n if (lastError || !result.partial || this.options.returnPartialData) {\n this.updateLastResult(result, variables);\n }\n if (lastError || isDifferent) {\n iterateObserversSafely(this.observers, \"next\", result);\n }\n };\n ObservableQuery.prototype.reportError = function (error, variables) {\n // Since we don't get the current result on errors, only the error, we\n // must mirror the updates that occur in QueryStore.markQueryError here\n var errorResult = __assign(__assign({}, this.getLastResult()), { error: error, errors: error.graphQLErrors, networkStatus: NetworkStatus.error, loading: false });\n this.updateLastResult(errorResult, variables);\n iterateObserversSafely(this.observers, \"error\", (this.last.error = error));\n };\n ObservableQuery.prototype.hasObservers = function () {\n return this.observers.size > 0;\n };\n ObservableQuery.prototype.tearDownQuery = function () {\n if (this.isTornDown)\n return;\n if (this.concast && this.observer) {\n this.concast.removeObserver(this.observer);\n delete this.concast;\n delete this.observer;\n }\n this.stopPolling();\n // stop all active GraphQL subscriptions\n this.subscriptions.forEach(function (sub) { return sub.unsubscribe(); });\n this.subscriptions.clear();\n this.queryManager.stopQuery(this.queryId);\n this.observers.clear();\n this.isTornDown = true;\n };\n ObservableQuery.prototype.transformDocument = function (document) {\n return this.queryManager.transform(document);\n };\n return ObservableQuery;\n}(Observable));\nexport { ObservableQuery };\n// Necessary because the ObservableQuery constructor has a different\n// signature than the Observable constructor.\nfixObservableSubclass(ObservableQuery);\n// Reobserve with fetchPolicy effectively set to \"cache-first\", triggering\n// delivery of any new data from the cache, possibly falling back to the network\n// if any cache data are missing. This allows _complete_ cache results to be\n// delivered without also kicking off unnecessary network requests when\n// this.options.fetchPolicy is \"cache-and-network\" or \"network-only\". When\n// this.options.fetchPolicy is any other policy (\"cache-first\", \"cache-only\",\n// \"standby\", or \"no-cache\"), we call this.reobserve() as usual.\nexport function reobserveCacheFirst(obsQuery) {\n var _a = obsQuery.options, fetchPolicy = _a.fetchPolicy, nextFetchPolicy = _a.nextFetchPolicy;\n if (fetchPolicy === \"cache-and-network\" || fetchPolicy === \"network-only\") {\n return obsQuery.reobserve({\n fetchPolicy: \"cache-first\",\n // Use a temporary nextFetchPolicy function that replaces itself with the\n // previous nextFetchPolicy value and returns the original fetchPolicy.\n nextFetchPolicy: function (currentFetchPolicy, context) {\n // Replace this nextFetchPolicy function in the options object with the\n // original this.options.nextFetchPolicy value.\n this.nextFetchPolicy = nextFetchPolicy;\n // If the original nextFetchPolicy value was a function, give it a\n // chance to decide what happens here.\n if (typeof this.nextFetchPolicy === \"function\") {\n return this.nextFetchPolicy(currentFetchPolicy, context);\n }\n // Otherwise go back to the original this.options.fetchPolicy.\n return fetchPolicy;\n },\n });\n }\n return obsQuery.reobserve();\n}\nfunction defaultSubscriptionObserverErrorCallback(error) {\n globalThis.__DEV__ !== false && invariant.error(24, error.message, error.stack);\n}\nexport function logMissingFieldErrors(missing) {\n if (globalThis.__DEV__ !== false && missing) {\n globalThis.__DEV__ !== false && invariant.debug(25, missing);\n }\n}\nfunction skipCacheDataFor(fetchPolicy /* `undefined` would mean `\"cache-first\"` */) {\n return (fetchPolicy === \"network-only\" ||\n fetchPolicy === \"no-cache\" ||\n fetchPolicy === \"standby\");\n}\n//# sourceMappingURL=ObservableQuery.js.map","import { __assign } from \"tslib\";\nimport { equal } from \"@wry/equality\";\nimport { DeepMerger } from \"../utilities/index.js\";\nimport { mergeIncrementalData } from \"../utilities/index.js\";\nimport { reobserveCacheFirst } from \"./ObservableQuery.js\";\nimport { isNonEmptyArray, graphQLResultHasError, canUseWeakMap, } from \"../utilities/index.js\";\nimport { NetworkStatus, isNetworkRequestInFlight } from \"./networkStatus.js\";\nvar destructiveMethodCounts = new (canUseWeakMap ? WeakMap : Map)();\nfunction wrapDestructiveCacheMethod(cache, methodName) {\n var original = cache[methodName];\n if (typeof original === \"function\") {\n // @ts-expect-error this is just too generic to be typed correctly\n cache[methodName] = function () {\n destructiveMethodCounts.set(cache, \n // The %1e15 allows the count to wrap around to 0 safely every\n // quadrillion evictions, so there's no risk of overflow. To be\n // clear, this is more of a pedantic principle than something\n // that matters in any conceivable practical scenario.\n (destructiveMethodCounts.get(cache) + 1) % 1e15);\n // @ts-expect-error this is just too generic to be typed correctly\n return original.apply(this, arguments);\n };\n }\n}\nfunction cancelNotifyTimeout(info) {\n if (info[\"notifyTimeout\"]) {\n clearTimeout(info[\"notifyTimeout\"]);\n info[\"notifyTimeout\"] = void 0;\n }\n}\n// A QueryInfo object represents a single query managed by the\n// QueryManager, which tracks all QueryInfo objects by queryId in its\n// this.queries Map. QueryInfo objects store the latest results and errors\n// for the given query, and are responsible for reporting those results to\n// the corresponding ObservableQuery, via the QueryInfo.notify method.\n// Results are reported asynchronously whenever setDiff marks the\n// QueryInfo object as dirty, though a call to the QueryManager's\n// broadcastQueries method may trigger the notification before it happens\n// automatically. This class used to be a simple interface type without\n// any field privacy or meaningful methods, which is why it still has so\n// many public fields. The effort to lock down and simplify the QueryInfo\n// interface is ongoing, and further improvements are welcome.\nvar QueryInfo = /** @class */ (function () {\n function QueryInfo(queryManager, queryId) {\n if (queryId === void 0) { queryId = queryManager.generateQueryId(); }\n this.queryId = queryId;\n this.listeners = new Set();\n this.document = null;\n this.lastRequestId = 1;\n this.stopped = false;\n this.dirty = false;\n this.observableQuery = null;\n var cache = (this.cache = queryManager.cache);\n // Track how often cache.evict is called, since we want eviction to\n // override the feud-stopping logic in the markResult method, by\n // causing shouldWrite to return true. Wrapping the cache.evict method\n // is a bit of a hack, but it saves us from having to make eviction\n // counting an official part of the ApolloCache API.\n if (!destructiveMethodCounts.has(cache)) {\n destructiveMethodCounts.set(cache, 0);\n wrapDestructiveCacheMethod(cache, \"evict\");\n wrapDestructiveCacheMethod(cache, \"modify\");\n wrapDestructiveCacheMethod(cache, \"reset\");\n }\n }\n QueryInfo.prototype.init = function (query) {\n var networkStatus = query.networkStatus || NetworkStatus.loading;\n if (this.variables &&\n this.networkStatus !== NetworkStatus.loading &&\n !equal(this.variables, query.variables)) {\n networkStatus = NetworkStatus.setVariables;\n }\n if (!equal(query.variables, this.variables)) {\n this.lastDiff = void 0;\n }\n Object.assign(this, {\n document: query.document,\n variables: query.variables,\n networkError: null,\n graphQLErrors: this.graphQLErrors || [],\n networkStatus: networkStatus,\n });\n if (query.observableQuery) {\n this.setObservableQuery(query.observableQuery);\n }\n if (query.lastRequestId) {\n this.lastRequestId = query.lastRequestId;\n }\n return this;\n };\n QueryInfo.prototype.reset = function () {\n cancelNotifyTimeout(this);\n this.dirty = false;\n };\n QueryInfo.prototype.resetDiff = function () {\n this.lastDiff = void 0;\n };\n QueryInfo.prototype.getDiff = function () {\n var options = this.getDiffOptions();\n if (this.lastDiff && equal(options, this.lastDiff.options)) {\n return this.lastDiff.diff;\n }\n this.updateWatch(this.variables);\n var oq = this.observableQuery;\n if (oq && oq.options.fetchPolicy === \"no-cache\") {\n return { complete: false };\n }\n var diff = this.cache.diff(options);\n this.updateLastDiff(diff, options);\n return diff;\n };\n QueryInfo.prototype.updateLastDiff = function (diff, options) {\n this.lastDiff =\n diff ?\n {\n diff: diff,\n options: options || this.getDiffOptions(),\n }\n : void 0;\n };\n QueryInfo.prototype.getDiffOptions = function (variables) {\n var _a;\n if (variables === void 0) { variables = this.variables; }\n return {\n query: this.document,\n variables: variables,\n returnPartialData: true,\n optimistic: true,\n canonizeResults: (_a = this.observableQuery) === null || _a === void 0 ? void 0 : _a.options.canonizeResults,\n };\n };\n QueryInfo.prototype.setDiff = function (diff) {\n var _this = this;\n var _a;\n var oldDiff = this.lastDiff && this.lastDiff.diff;\n // If we are trying to deliver an incomplete cache result, we avoid\n // reporting it if the query has errored, otherwise we let the broadcast try\n // and repair the partial result by refetching the query. This check avoids\n // a situation where a query that errors and another succeeds with\n // overlapping data does not report the partial data result to the errored\n // query.\n //\n // See https://github.com/apollographql/apollo-client/issues/11400 for more\n // information on this issue.\n if (diff && !diff.complete && ((_a = this.observableQuery) === null || _a === void 0 ? void 0 : _a.getLastError())) {\n return;\n }\n this.updateLastDiff(diff);\n if (!this.dirty && !equal(oldDiff && oldDiff.result, diff && diff.result)) {\n this.dirty = true;\n if (!this.notifyTimeout) {\n this.notifyTimeout = setTimeout(function () { return _this.notify(); }, 0);\n }\n }\n };\n QueryInfo.prototype.setObservableQuery = function (oq) {\n var _this = this;\n if (oq === this.observableQuery)\n return;\n if (this.oqListener) {\n this.listeners.delete(this.oqListener);\n }\n this.observableQuery = oq;\n if (oq) {\n oq[\"queryInfo\"] = this;\n this.listeners.add((this.oqListener = function () {\n var diff = _this.getDiff();\n if (diff.fromOptimisticTransaction) {\n // If this diff came from an optimistic transaction, deliver the\n // current cache data to the ObservableQuery, but don't perform a\n // reobservation, since oq.reobserveCacheFirst might make a network\n // request, and we never want to trigger network requests in the\n // middle of optimistic updates.\n oq[\"observe\"]();\n }\n else {\n // Otherwise, make the ObservableQuery \"reobserve\" the latest data\n // using a temporary fetch policy of \"cache-first\", so complete cache\n // results have a chance to be delivered without triggering additional\n // network requests, even when options.fetchPolicy is \"network-only\"\n // or \"cache-and-network\". All other fetch policies are preserved by\n // this method, and are handled by calling oq.reobserve(). If this\n // reobservation is spurious, isDifferentFromLastResult still has a\n // chance to catch it before delivery to ObservableQuery subscribers.\n reobserveCacheFirst(oq);\n }\n }));\n }\n else {\n delete this.oqListener;\n }\n };\n QueryInfo.prototype.notify = function () {\n var _this = this;\n cancelNotifyTimeout(this);\n if (this.shouldNotify()) {\n this.listeners.forEach(function (listener) { return listener(_this); });\n }\n this.dirty = false;\n };\n QueryInfo.prototype.shouldNotify = function () {\n if (!this.dirty || !this.listeners.size) {\n return false;\n }\n if (isNetworkRequestInFlight(this.networkStatus) && this.observableQuery) {\n var fetchPolicy = this.observableQuery.options.fetchPolicy;\n if (fetchPolicy !== \"cache-only\" && fetchPolicy !== \"cache-and-network\") {\n return false;\n }\n }\n return true;\n };\n QueryInfo.prototype.stop = function () {\n if (!this.stopped) {\n this.stopped = true;\n // Cancel the pending notify timeout\n this.reset();\n this.cancel();\n // Revert back to the no-op version of cancel inherited from\n // QueryInfo.prototype.\n this.cancel = QueryInfo.prototype.cancel;\n var oq = this.observableQuery;\n if (oq)\n oq.stopPolling();\n }\n };\n // This method is a no-op by default, until/unless overridden by the\n // updateWatch method.\n QueryInfo.prototype.cancel = function () { };\n QueryInfo.prototype.updateWatch = function (variables) {\n var _this = this;\n if (variables === void 0) { variables = this.variables; }\n var oq = this.observableQuery;\n if (oq && oq.options.fetchPolicy === \"no-cache\") {\n return;\n }\n var watchOptions = __assign(__assign({}, this.getDiffOptions(variables)), { watcher: this, callback: function (diff) { return _this.setDiff(diff); } });\n if (!this.lastWatch || !equal(watchOptions, this.lastWatch)) {\n this.cancel();\n this.cancel = this.cache.watch((this.lastWatch = watchOptions));\n }\n };\n QueryInfo.prototype.resetLastWrite = function () {\n this.lastWrite = void 0;\n };\n QueryInfo.prototype.shouldWrite = function (result, variables) {\n var lastWrite = this.lastWrite;\n return !(lastWrite &&\n // If cache.evict has been called since the last time we wrote this\n // data into the cache, there's a chance writing this result into\n // the cache will repair what was evicted.\n lastWrite.dmCount === destructiveMethodCounts.get(this.cache) &&\n equal(variables, lastWrite.variables) &&\n equal(result.data, lastWrite.result.data));\n };\n QueryInfo.prototype.markResult = function (result, document, options, cacheWriteBehavior) {\n var _this = this;\n var merger = new DeepMerger();\n var graphQLErrors = isNonEmptyArray(result.errors) ? result.errors.slice(0) : [];\n // Cancel the pending notify timeout (if it exists) to prevent extraneous network\n // requests. To allow future notify timeouts, diff and dirty are reset as well.\n this.reset();\n if (\"incremental\" in result && isNonEmptyArray(result.incremental)) {\n var mergedData = mergeIncrementalData(this.getDiff().result, result);\n result.data = mergedData;\n // Detect the first chunk of a deferred query and merge it with existing\n // cache data. This ensures a `cache-first` fetch policy that returns\n // partial cache data or a `cache-and-network` fetch policy that already\n // has full data in the cache does not complain when trying to merge the\n // initial deferred server data with existing cache data.\n }\n else if (\"hasNext\" in result && result.hasNext) {\n var diff = this.getDiff();\n result.data = merger.merge(diff.result, result.data);\n }\n this.graphQLErrors = graphQLErrors;\n if (options.fetchPolicy === \"no-cache\") {\n this.updateLastDiff({ result: result.data, complete: true }, this.getDiffOptions(options.variables));\n }\n else if (cacheWriteBehavior !== 0 /* CacheWriteBehavior.FORBID */) {\n if (shouldWriteResult(result, options.errorPolicy)) {\n // Using a transaction here so we have a chance to read the result\n // back from the cache before the watch callback fires as a result\n // of writeQuery, so we can store the new diff quietly and ignore\n // it when we receive it redundantly from the watch callback.\n this.cache.performTransaction(function (cache) {\n if (_this.shouldWrite(result, options.variables)) {\n cache.writeQuery({\n query: document,\n data: result.data,\n variables: options.variables,\n overwrite: cacheWriteBehavior === 1 /* CacheWriteBehavior.OVERWRITE */,\n });\n _this.lastWrite = {\n result: result,\n variables: options.variables,\n dmCount: destructiveMethodCounts.get(_this.cache),\n };\n }\n else {\n // If result is the same as the last result we received from\n // the network (and the variables match too), avoid writing\n // result into the cache again. The wisdom of skipping this\n // cache write is far from obvious, since any cache write\n // could be the one that puts the cache back into a desired\n // state, fixing corruption or missing data. However, if we\n // always write every network result into the cache, we enable\n // feuds between queries competing to update the same data in\n // incompatible ways, which can lead to an endless cycle of\n // cache broadcasts and useless network requests. As with any\n // feud, eventually one side must step back from the brink,\n // letting the other side(s) have the last word(s). There may\n // be other points where we could break this cycle, such as\n // silencing the broadcast for cache.writeQuery (not a good\n // idea, since it just delays the feud a bit) or somehow\n // avoiding the network request that just happened (also bad,\n // because the server could return useful new data). All\n // options considered, skipping this cache write seems to be\n // the least damaging place to break the cycle, because it\n // reflects the intuition that we recently wrote this exact\n // result into the cache, so the cache *should* already/still\n // contain this data. If some other query has clobbered that\n // data in the meantime, that's too bad, but there will be no\n // winners if every query blindly reverts to its own version\n // of the data. This approach also gives the network a chance\n // to return new data, which will be written into the cache as\n // usual, notifying only those queries that are directly\n // affected by the cache updates, as usual. In the future, an\n // even more sophisticated cache could perhaps prevent or\n // mitigate the clobbering somehow, but that would make this\n // particular cache write even less important, and thus\n // skipping it would be even safer than it is today.\n if (_this.lastDiff && _this.lastDiff.diff.complete) {\n // Reuse data from the last good (complete) diff that we\n // received, when possible.\n result.data = _this.lastDiff.diff.result;\n return;\n }\n // If the previous this.diff was incomplete, fall through to\n // re-reading the latest data with cache.diff, below.\n }\n var diffOptions = _this.getDiffOptions(options.variables);\n var diff = cache.diff(diffOptions);\n // In case the QueryManager stops this QueryInfo before its\n // results are delivered, it's important to avoid restarting the\n // cache watch when markResult is called. We also avoid updating\n // the watch if we are writing a result that doesn't match the current\n // variables to avoid race conditions from broadcasting the wrong\n // result.\n if (!_this.stopped && equal(_this.variables, options.variables)) {\n // Any time we're about to update this.diff, we need to make\n // sure we've started watching the cache.\n _this.updateWatch(options.variables);\n }\n // If we're allowed to write to the cache, and we can read a\n // complete result from the cache, update result.data to be the\n // result from the cache, rather than the raw network result.\n // Set without setDiff to avoid triggering a notify call, since\n // we have other ways of notifying for this result.\n _this.updateLastDiff(diff, diffOptions);\n if (diff.complete) {\n result.data = diff.result;\n }\n });\n }\n else {\n this.lastWrite = void 0;\n }\n }\n };\n QueryInfo.prototype.markReady = function () {\n this.networkError = null;\n return (this.networkStatus = NetworkStatus.ready);\n };\n QueryInfo.prototype.markError = function (error) {\n this.networkStatus = NetworkStatus.error;\n this.lastWrite = void 0;\n this.reset();\n if (error.graphQLErrors) {\n this.graphQLErrors = error.graphQLErrors;\n }\n if (error.networkError) {\n this.networkError = error.networkError;\n }\n return error;\n };\n return QueryInfo;\n}());\nexport { QueryInfo };\nexport function shouldWriteResult(result, errorPolicy) {\n if (errorPolicy === void 0) { errorPolicy = \"none\"; }\n var ignoreErrors = errorPolicy === \"ignore\" || errorPolicy === \"all\";\n var writeWithErrors = !graphQLResultHasError(result);\n if (!writeWithErrors && ignoreErrors && result.data) {\n writeWithErrors = true;\n }\n return writeWithErrors;\n}\n//# sourceMappingURL=QueryInfo.js.map","import { __assign, __awaiter, __generator } from \"tslib\";\nimport { invariant, newInvariantError } from \"../utilities/globals/index.js\";\nimport { equal } from \"@wry/equality\";\nimport { execute } from \"../link/core/index.js\";\nimport { hasDirectives, isExecutionPatchIncrementalResult, isExecutionPatchResult, removeDirectivesFromDocument, } from \"../utilities/index.js\";\nimport { canonicalStringify } from \"../cache/index.js\";\nimport { getDefaultValues, getOperationDefinition, getOperationName, hasClientExports, graphQLResultHasError, getGraphQLErrorsFromResult, Observable, asyncMap, isNonEmptyArray, Concast, makeUniqueId, isDocumentNode, isNonNullObject, DocumentTransform, } from \"../utilities/index.js\";\nimport { mergeIncrementalData } from \"../utilities/common/incrementalResult.js\";\nimport { ApolloError, isApolloError, graphQLResultHasProtocolErrors, } from \"../errors/index.js\";\nimport { ObservableQuery, logMissingFieldErrors } from \"./ObservableQuery.js\";\nimport { NetworkStatus, isNetworkRequestInFlight } from \"./networkStatus.js\";\nimport { QueryInfo, shouldWriteResult, } from \"./QueryInfo.js\";\nimport { PROTOCOL_ERRORS_SYMBOL } from \"../errors/index.js\";\nimport { print } from \"../utilities/index.js\";\nvar hasOwnProperty = Object.prototype.hasOwnProperty;\nvar IGNORE = Object.create(null);\nimport { Trie } from \"@wry/trie\";\nimport { AutoCleanedWeakCache, cacheSizes } from \"../utilities/index.js\";\nvar QueryManager = /** @class */ (function () {\n function QueryManager(options) {\n var _this = this;\n this.clientAwareness = {};\n // All the queries that the QueryManager is currently managing (not\n // including mutations and subscriptions).\n this.queries = new Map();\n // Maps from queryId strings to Promise rejection functions for\n // currently active queries and fetches.\n // Use protected instead of private field so\n // @apollo/experimental-nextjs-app-support can access type info.\n this.fetchCancelFns = new Map();\n this.transformCache = new AutoCleanedWeakCache(cacheSizes[\"queryManager.getDocumentInfo\"] ||\n 2000 /* defaultCacheSizes[\"queryManager.getDocumentInfo\"] */);\n this.queryIdCounter = 1;\n this.requestIdCounter = 1;\n this.mutationIdCounter = 1;\n // Use protected instead of private field so\n // @apollo/experimental-nextjs-app-support can access type info.\n this.inFlightLinkObservables = new Trie(false);\n var defaultDocumentTransform = new DocumentTransform(function (document) { return _this.cache.transformDocument(document); }, \n // Allow the apollo cache to manage its own transform caches\n { cache: false });\n this.cache = options.cache;\n this.link = options.link;\n this.defaultOptions = options.defaultOptions;\n this.queryDeduplication = options.queryDeduplication;\n this.clientAwareness = options.clientAwareness;\n this.localState = options.localState;\n this.ssrMode = options.ssrMode;\n this.assumeImmutableResults = options.assumeImmutableResults;\n var documentTransform = options.documentTransform;\n this.documentTransform =\n documentTransform ?\n defaultDocumentTransform\n .concat(documentTransform)\n // The custom document transform may add new fragment spreads or new\n // field selections, so we want to give the cache a chance to run\n // again. For example, the InMemoryCache adds __typename to field\n // selections and fragments from the fragment registry.\n .concat(defaultDocumentTransform)\n : defaultDocumentTransform;\n this.defaultContext = options.defaultContext || Object.create(null);\n if ((this.onBroadcast = options.onBroadcast)) {\n this.mutationStore = Object.create(null);\n }\n }\n /**\n * Call this method to terminate any active query processes, making it safe\n * to dispose of this QueryManager instance.\n */\n QueryManager.prototype.stop = function () {\n var _this = this;\n this.queries.forEach(function (_info, queryId) {\n _this.stopQueryNoBroadcast(queryId);\n });\n this.cancelPendingFetches(newInvariantError(26));\n };\n QueryManager.prototype.cancelPendingFetches = function (error) {\n this.fetchCancelFns.forEach(function (cancel) { return cancel(error); });\n this.fetchCancelFns.clear();\n };\n QueryManager.prototype.mutate = function (_a) {\n return __awaiter(this, arguments, void 0, function (_b) {\n var mutationId, hasClientExports, mutationStoreValue, isOptimistic, self;\n var _c, _d;\n var mutation = _b.mutation, variables = _b.variables, optimisticResponse = _b.optimisticResponse, updateQueries = _b.updateQueries, _e = _b.refetchQueries, refetchQueries = _e === void 0 ? [] : _e, _f = _b.awaitRefetchQueries, awaitRefetchQueries = _f === void 0 ? false : _f, updateWithProxyFn = _b.update, onQueryUpdated = _b.onQueryUpdated, _g = _b.fetchPolicy, fetchPolicy = _g === void 0 ? ((_c = this.defaultOptions.mutate) === null || _c === void 0 ? void 0 : _c.fetchPolicy) || \"network-only\" : _g, _h = _b.errorPolicy, errorPolicy = _h === void 0 ? ((_d = this.defaultOptions.mutate) === null || _d === void 0 ? void 0 : _d.errorPolicy) || \"none\" : _h, keepRootFields = _b.keepRootFields, context = _b.context;\n return __generator(this, function (_j) {\n switch (_j.label) {\n case 0:\n invariant(mutation, 27);\n invariant(fetchPolicy === \"network-only\" || fetchPolicy === \"no-cache\", 28);\n mutationId = this.generateMutationId();\n mutation = this.cache.transformForLink(this.transform(mutation));\n hasClientExports = this.getDocumentInfo(mutation).hasClientExports;\n variables = this.getVariables(mutation, variables);\n if (!hasClientExports) return [3 /*break*/, 2];\n return [4 /*yield*/, this.localState.addExportedVariables(mutation, variables, context)];\n case 1:\n variables = (_j.sent());\n _j.label = 2;\n case 2:\n mutationStoreValue = this.mutationStore &&\n (this.mutationStore[mutationId] = {\n mutation: mutation,\n variables: variables,\n loading: true,\n error: null,\n });\n isOptimistic = optimisticResponse &&\n this.markMutationOptimistic(optimisticResponse, {\n mutationId: mutationId,\n document: mutation,\n variables: variables,\n fetchPolicy: fetchPolicy,\n errorPolicy: errorPolicy,\n context: context,\n updateQueries: updateQueries,\n update: updateWithProxyFn,\n keepRootFields: keepRootFields,\n });\n this.broadcastQueries();\n self = this;\n return [2 /*return*/, new Promise(function (resolve, reject) {\n return asyncMap(self.getObservableFromLink(mutation, __assign(__assign({}, context), { optimisticResponse: isOptimistic ? optimisticResponse : void 0 }), variables, {}, false), function (result) {\n if (graphQLResultHasError(result) && errorPolicy === \"none\") {\n throw new ApolloError({\n graphQLErrors: getGraphQLErrorsFromResult(result),\n });\n }\n if (mutationStoreValue) {\n mutationStoreValue.loading = false;\n mutationStoreValue.error = null;\n }\n var storeResult = __assign({}, result);\n if (typeof refetchQueries === \"function\") {\n refetchQueries = refetchQueries(storeResult);\n }\n if (errorPolicy === \"ignore\" && graphQLResultHasError(storeResult)) {\n delete storeResult.errors;\n }\n return self.markMutationResult({\n mutationId: mutationId,\n result: storeResult,\n document: mutation,\n variables: variables,\n fetchPolicy: fetchPolicy,\n errorPolicy: errorPolicy,\n context: context,\n update: updateWithProxyFn,\n updateQueries: updateQueries,\n awaitRefetchQueries: awaitRefetchQueries,\n refetchQueries: refetchQueries,\n removeOptimistic: isOptimistic ? mutationId : void 0,\n onQueryUpdated: onQueryUpdated,\n keepRootFields: keepRootFields,\n });\n }).subscribe({\n next: function (storeResult) {\n self.broadcastQueries();\n // Since mutations might receive multiple payloads from the\n // ApolloLink chain (e.g. when used with @defer),\n // we resolve with a SingleExecutionResult or after the final\n // ExecutionPatchResult has arrived and we have assembled the\n // multipart response into a single result.\n if (!(\"hasNext\" in storeResult) || storeResult.hasNext === false) {\n resolve(storeResult);\n }\n },\n error: function (err) {\n if (mutationStoreValue) {\n mutationStoreValue.loading = false;\n mutationStoreValue.error = err;\n }\n if (isOptimistic) {\n self.cache.removeOptimistic(mutationId);\n }\n self.broadcastQueries();\n reject(err instanceof ApolloError ? err : (new ApolloError({\n networkError: err,\n })));\n },\n });\n })];\n }\n });\n });\n };\n QueryManager.prototype.markMutationResult = function (mutation, cache) {\n var _this = this;\n if (cache === void 0) { cache = this.cache; }\n var result = mutation.result;\n var cacheWrites = [];\n var skipCache = mutation.fetchPolicy === \"no-cache\";\n if (!skipCache && shouldWriteResult(result, mutation.errorPolicy)) {\n if (!isExecutionPatchIncrementalResult(result)) {\n cacheWrites.push({\n result: result.data,\n dataId: \"ROOT_MUTATION\",\n query: mutation.document,\n variables: mutation.variables,\n });\n }\n if (isExecutionPatchIncrementalResult(result) &&\n isNonEmptyArray(result.incremental)) {\n var diff = cache.diff({\n id: \"ROOT_MUTATION\",\n // The cache complains if passed a mutation where it expects a\n // query, so we transform mutations and subscriptions to queries\n // (only once, thanks to this.transformCache).\n query: this.getDocumentInfo(mutation.document).asQuery,\n variables: mutation.variables,\n optimistic: false,\n returnPartialData: true,\n });\n var mergedData = void 0;\n if (diff.result) {\n mergedData = mergeIncrementalData(diff.result, result);\n }\n if (typeof mergedData !== \"undefined\") {\n // cast the ExecutionPatchResult to FetchResult here since\n // ExecutionPatchResult never has `data` when returned from the server\n result.data = mergedData;\n cacheWrites.push({\n result: mergedData,\n dataId: \"ROOT_MUTATION\",\n query: mutation.document,\n variables: mutation.variables,\n });\n }\n }\n var updateQueries_1 = mutation.updateQueries;\n if (updateQueries_1) {\n this.queries.forEach(function (_a, queryId) {\n var observableQuery = _a.observableQuery;\n var queryName = observableQuery && observableQuery.queryName;\n if (!queryName || !hasOwnProperty.call(updateQueries_1, queryName)) {\n return;\n }\n var updater = updateQueries_1[queryName];\n var _b = _this.queries.get(queryId), document = _b.document, variables = _b.variables;\n // Read the current query result from the store.\n var _c = cache.diff({\n query: document,\n variables: variables,\n returnPartialData: true,\n optimistic: false,\n }), currentQueryResult = _c.result, complete = _c.complete;\n if (complete && currentQueryResult) {\n // Run our reducer using the current query result and the mutation result.\n var nextQueryResult = updater(currentQueryResult, {\n mutationResult: result,\n queryName: (document && getOperationName(document)) || void 0,\n queryVariables: variables,\n });\n // Write the modified result back into the store if we got a new result.\n if (nextQueryResult) {\n cacheWrites.push({\n result: nextQueryResult,\n dataId: \"ROOT_QUERY\",\n query: document,\n variables: variables,\n });\n }\n }\n });\n }\n }\n if (cacheWrites.length > 0 ||\n (mutation.refetchQueries || \"\").length > 0 ||\n mutation.update ||\n mutation.onQueryUpdated ||\n mutation.removeOptimistic) {\n var results_1 = [];\n this.refetchQueries({\n updateCache: function (cache) {\n if (!skipCache) {\n cacheWrites.forEach(function (write) { return cache.write(write); });\n }\n // If the mutation has some writes associated with it then we need to\n // apply those writes to the store by running this reducer again with\n // a write action.\n var update = mutation.update;\n // Determine whether result is a SingleExecutionResult,\n // or the final ExecutionPatchResult.\n var isFinalResult = !isExecutionPatchResult(result) ||\n (isExecutionPatchIncrementalResult(result) && !result.hasNext);\n if (update) {\n if (!skipCache) {\n // Re-read the ROOT_MUTATION data we just wrote into the cache\n // (the first cache.write call in the cacheWrites.forEach loop\n // above), so field read functions have a chance to run for\n // fields within mutation result objects.\n var diff = cache.diff({\n id: \"ROOT_MUTATION\",\n // The cache complains if passed a mutation where it expects a\n // query, so we transform mutations and subscriptions to queries\n // (only once, thanks to this.transformCache).\n query: _this.getDocumentInfo(mutation.document).asQuery,\n variables: mutation.variables,\n optimistic: false,\n returnPartialData: true,\n });\n if (diff.complete) {\n result = __assign(__assign({}, result), { data: diff.result });\n if (\"incremental\" in result) {\n delete result.incremental;\n }\n if (\"hasNext\" in result) {\n delete result.hasNext;\n }\n }\n }\n // If we've received the whole response,\n // either a SingleExecutionResult or the final ExecutionPatchResult,\n // call the update function.\n if (isFinalResult) {\n update(cache, result, {\n context: mutation.context,\n variables: mutation.variables,\n });\n }\n }\n // TODO Do this with cache.evict({ id: 'ROOT_MUTATION' }) but make it\n // shallow to allow rolling back optimistic evictions.\n if (!skipCache && !mutation.keepRootFields && isFinalResult) {\n cache.modify({\n id: \"ROOT_MUTATION\",\n fields: function (value, _a) {\n var fieldName = _a.fieldName, DELETE = _a.DELETE;\n return fieldName === \"__typename\" ? value : DELETE;\n },\n });\n }\n },\n include: mutation.refetchQueries,\n // Write the final mutation.result to the root layer of the cache.\n optimistic: false,\n // Remove the corresponding optimistic layer at the same time as we\n // write the final non-optimistic result.\n removeOptimistic: mutation.removeOptimistic,\n // Let the caller of client.mutate optionally determine the refetching\n // behavior for watched queries after the mutation.update function runs.\n // If no onQueryUpdated function was provided for this mutation, pass\n // null instead of undefined to disable the default refetching behavior.\n onQueryUpdated: mutation.onQueryUpdated || null,\n }).forEach(function (result) { return results_1.push(result); });\n if (mutation.awaitRefetchQueries || mutation.onQueryUpdated) {\n // Returning a promise here makes the mutation await that promise, so we\n // include results in that promise's work if awaitRefetchQueries or an\n // onQueryUpdated function was specified.\n return Promise.all(results_1).then(function () { return result; });\n }\n }\n return Promise.resolve(result);\n };\n QueryManager.prototype.markMutationOptimistic = function (optimisticResponse, mutation) {\n var _this = this;\n var data = typeof optimisticResponse === \"function\" ?\n optimisticResponse(mutation.variables, { IGNORE: IGNORE })\n : optimisticResponse;\n if (data === IGNORE) {\n return false;\n }\n this.cache.recordOptimisticTransaction(function (cache) {\n try {\n _this.markMutationResult(__assign(__assign({}, mutation), { result: { data: data } }), cache);\n }\n catch (error) {\n globalThis.__DEV__ !== false && invariant.error(error);\n }\n }, mutation.mutationId);\n return true;\n };\n QueryManager.prototype.fetchQuery = function (queryId, options, networkStatus) {\n return this.fetchConcastWithInfo(queryId, options, networkStatus).concast\n .promise;\n };\n QueryManager.prototype.getQueryStore = function () {\n var store = Object.create(null);\n this.queries.forEach(function (info, queryId) {\n store[queryId] = {\n variables: info.variables,\n networkStatus: info.networkStatus,\n networkError: info.networkError,\n graphQLErrors: info.graphQLErrors,\n };\n });\n return store;\n };\n QueryManager.prototype.resetErrors = function (queryId) {\n var queryInfo = this.queries.get(queryId);\n if (queryInfo) {\n queryInfo.networkError = undefined;\n queryInfo.graphQLErrors = [];\n }\n };\n QueryManager.prototype.transform = function (document) {\n return this.documentTransform.transformDocument(document);\n };\n QueryManager.prototype.getDocumentInfo = function (document) {\n var transformCache = this.transformCache;\n if (!transformCache.has(document)) {\n var cacheEntry = {\n // TODO These three calls (hasClientExports, shouldForceResolvers, and\n // usesNonreactiveDirective) are performing independent full traversals\n // of the transformed document. We should consider merging these\n // traversals into a single pass in the future, though the work is\n // cached after the first time.\n hasClientExports: hasClientExports(document),\n hasForcedResolvers: this.localState.shouldForceResolvers(document),\n hasNonreactiveDirective: hasDirectives([\"nonreactive\"], document),\n clientQuery: this.localState.clientQuery(document),\n serverQuery: removeDirectivesFromDocument([\n { name: \"client\", remove: true },\n { name: \"connection\" },\n { name: \"nonreactive\" },\n ], document),\n defaultVars: getDefaultValues(getOperationDefinition(document)),\n // Transform any mutation or subscription operations to query operations\n // so we can read/write them from/to the cache.\n asQuery: __assign(__assign({}, document), { definitions: document.definitions.map(function (def) {\n if (def.kind === \"OperationDefinition\" &&\n def.operation !== \"query\") {\n return __assign(__assign({}, def), { operation: \"query\" });\n }\n return def;\n }) }),\n };\n transformCache.set(document, cacheEntry);\n }\n return transformCache.get(document);\n };\n QueryManager.prototype.getVariables = function (document, variables) {\n return __assign(__assign({}, this.getDocumentInfo(document).defaultVars), variables);\n };\n QueryManager.prototype.watchQuery = function (options) {\n var query = this.transform(options.query);\n // assign variable default values if supplied\n // NOTE: We don't modify options.query here with the transformed query to\n // ensure observable.options.query is set to the raw untransformed query.\n options = __assign(__assign({}, options), { variables: this.getVariables(query, options.variables) });\n if (typeof options.notifyOnNetworkStatusChange === \"undefined\") {\n options.notifyOnNetworkStatusChange = false;\n }\n var queryInfo = new QueryInfo(this);\n var observable = new ObservableQuery({\n queryManager: this,\n queryInfo: queryInfo,\n options: options,\n });\n observable[\"lastQuery\"] = query;\n this.queries.set(observable.queryId, queryInfo);\n // We give queryInfo the transformed query to ensure the first cache diff\n // uses the transformed query instead of the raw query\n queryInfo.init({\n document: query,\n observableQuery: observable,\n variables: observable.variables,\n });\n return observable;\n };\n QueryManager.prototype.query = function (options, queryId) {\n var _this = this;\n if (queryId === void 0) { queryId = this.generateQueryId(); }\n invariant(options.query, 29);\n invariant(options.query.kind === \"Document\", 30);\n invariant(!options.returnPartialData, 31);\n invariant(!options.pollInterval, 32);\n return this.fetchQuery(queryId, __assign(__assign({}, options), { query: this.transform(options.query) })).finally(function () { return _this.stopQuery(queryId); });\n };\n QueryManager.prototype.generateQueryId = function () {\n return String(this.queryIdCounter++);\n };\n QueryManager.prototype.generateRequestId = function () {\n return this.requestIdCounter++;\n };\n QueryManager.prototype.generateMutationId = function () {\n return String(this.mutationIdCounter++);\n };\n QueryManager.prototype.stopQueryInStore = function (queryId) {\n this.stopQueryInStoreNoBroadcast(queryId);\n this.broadcastQueries();\n };\n QueryManager.prototype.stopQueryInStoreNoBroadcast = function (queryId) {\n var queryInfo = this.queries.get(queryId);\n if (queryInfo)\n queryInfo.stop();\n };\n QueryManager.prototype.clearStore = function (options) {\n if (options === void 0) { options = {\n discardWatches: true,\n }; }\n // Before we have sent the reset action to the store, we can no longer\n // rely on the results returned by in-flight requests since these may\n // depend on values that previously existed in the data portion of the\n // store. So, we cancel the promises and observers that we have issued\n // so far and not yet resolved (in the case of queries).\n this.cancelPendingFetches(newInvariantError(33));\n this.queries.forEach(function (queryInfo) {\n if (queryInfo.observableQuery) {\n // Set loading to true so listeners don't trigger unless they want\n // results with partial data.\n queryInfo.networkStatus = NetworkStatus.loading;\n }\n else {\n queryInfo.stop();\n }\n });\n if (this.mutationStore) {\n this.mutationStore = Object.create(null);\n }\n // begin removing data from the store\n return this.cache.reset(options);\n };\n QueryManager.prototype.getObservableQueries = function (include) {\n var _this = this;\n if (include === void 0) { include = \"active\"; }\n var queries = new Map();\n var queryNamesAndDocs = new Map();\n var legacyQueryOptions = new Set();\n if (Array.isArray(include)) {\n include.forEach(function (desc) {\n if (typeof desc === \"string\") {\n queryNamesAndDocs.set(desc, false);\n }\n else if (isDocumentNode(desc)) {\n queryNamesAndDocs.set(_this.transform(desc), false);\n }\n else if (isNonNullObject(desc) && desc.query) {\n legacyQueryOptions.add(desc);\n }\n });\n }\n this.queries.forEach(function (_a, queryId) {\n var oq = _a.observableQuery, document = _a.document;\n if (oq) {\n if (include === \"all\") {\n queries.set(queryId, oq);\n return;\n }\n var queryName = oq.queryName, fetchPolicy = oq.options.fetchPolicy;\n if (fetchPolicy === \"standby\" ||\n (include === \"active\" && !oq.hasObservers())) {\n return;\n }\n if (include === \"active\" ||\n (queryName && queryNamesAndDocs.has(queryName)) ||\n (document && queryNamesAndDocs.has(document))) {\n queries.set(queryId, oq);\n if (queryName)\n queryNamesAndDocs.set(queryName, true);\n if (document)\n queryNamesAndDocs.set(document, true);\n }\n }\n });\n if (legacyQueryOptions.size) {\n legacyQueryOptions.forEach(function (options) {\n // We will be issuing a fresh network request for this query, so we\n // pre-allocate a new query ID here, using a special prefix to enable\n // cleaning up these temporary queries later, after fetching.\n var queryId = makeUniqueId(\"legacyOneTimeQuery\");\n var queryInfo = _this.getQuery(queryId).init({\n document: options.query,\n variables: options.variables,\n });\n var oq = new ObservableQuery({\n queryManager: _this,\n queryInfo: queryInfo,\n options: __assign(__assign({}, options), { fetchPolicy: \"network-only\" }),\n });\n invariant(oq.queryId === queryId);\n queryInfo.setObservableQuery(oq);\n queries.set(queryId, oq);\n });\n }\n if (globalThis.__DEV__ !== false && queryNamesAndDocs.size) {\n queryNamesAndDocs.forEach(function (included, nameOrDoc) {\n if (!included) {\n globalThis.__DEV__ !== false && invariant.warn(typeof nameOrDoc === \"string\" ? 34 : 35, nameOrDoc);\n }\n });\n }\n return queries;\n };\n QueryManager.prototype.reFetchObservableQueries = function (includeStandby) {\n var _this = this;\n if (includeStandby === void 0) { includeStandby = false; }\n var observableQueryPromises = [];\n this.getObservableQueries(includeStandby ? \"all\" : \"active\").forEach(function (observableQuery, queryId) {\n var fetchPolicy = observableQuery.options.fetchPolicy;\n observableQuery.resetLastResults();\n if (includeStandby ||\n (fetchPolicy !== \"standby\" && fetchPolicy !== \"cache-only\")) {\n observableQueryPromises.push(observableQuery.refetch());\n }\n _this.getQuery(queryId).setDiff(null);\n });\n this.broadcastQueries();\n return Promise.all(observableQueryPromises);\n };\n QueryManager.prototype.setObservableQuery = function (observableQuery) {\n this.getQuery(observableQuery.queryId).setObservableQuery(observableQuery);\n };\n QueryManager.prototype.startGraphQLSubscription = function (_a) {\n var _this = this;\n var query = _a.query, fetchPolicy = _a.fetchPolicy, _b = _a.errorPolicy, errorPolicy = _b === void 0 ? \"none\" : _b, variables = _a.variables, _c = _a.context, context = _c === void 0 ? {} : _c, _d = _a.extensions, extensions = _d === void 0 ? {} : _d;\n query = this.transform(query);\n variables = this.getVariables(query, variables);\n var makeObservable = function (variables) {\n return _this.getObservableFromLink(query, context, variables, extensions).map(function (result) {\n if (fetchPolicy !== \"no-cache\") {\n // the subscription interface should handle not sending us results we no longer subscribe to.\n // XXX I don't think we ever send in an object with errors, but we might in the future...\n if (shouldWriteResult(result, errorPolicy)) {\n _this.cache.write({\n query: query,\n result: result.data,\n dataId: \"ROOT_SUBSCRIPTION\",\n variables: variables,\n });\n }\n _this.broadcastQueries();\n }\n var hasErrors = graphQLResultHasError(result);\n var hasProtocolErrors = graphQLResultHasProtocolErrors(result);\n if (hasErrors || hasProtocolErrors) {\n var errors = {};\n if (hasErrors) {\n errors.graphQLErrors = result.errors;\n }\n if (hasProtocolErrors) {\n errors.protocolErrors = result.extensions[PROTOCOL_ERRORS_SYMBOL];\n }\n // `errorPolicy` is a mechanism for handling GraphQL errors, according\n // to our documentation, so we throw protocol errors regardless of the\n // set error policy.\n if (errorPolicy === \"none\" || hasProtocolErrors) {\n throw new ApolloError(errors);\n }\n }\n if (errorPolicy === \"ignore\") {\n delete result.errors;\n }\n return result;\n });\n };\n if (this.getDocumentInfo(query).hasClientExports) {\n var observablePromise_1 = this.localState\n .addExportedVariables(query, variables, context)\n .then(makeObservable);\n return new Observable(function (observer) {\n var sub = null;\n observablePromise_1.then(function (observable) { return (sub = observable.subscribe(observer)); }, observer.error);\n return function () { return sub && sub.unsubscribe(); };\n });\n }\n return makeObservable(variables);\n };\n QueryManager.prototype.stopQuery = function (queryId) {\n this.stopQueryNoBroadcast(queryId);\n this.broadcastQueries();\n };\n QueryManager.prototype.stopQueryNoBroadcast = function (queryId) {\n this.stopQueryInStoreNoBroadcast(queryId);\n this.removeQuery(queryId);\n };\n QueryManager.prototype.removeQuery = function (queryId) {\n // teardown all links\n // Both `QueryManager.fetchRequest` and `QueryManager.query` create separate promises\n // that each add their reject functions to fetchCancelFns.\n // A query created with `QueryManager.query()` could trigger a `QueryManager.fetchRequest`.\n // The same queryId could have two rejection fns for two promises\n this.fetchCancelFns.delete(queryId);\n if (this.queries.has(queryId)) {\n this.getQuery(queryId).stop();\n this.queries.delete(queryId);\n }\n };\n QueryManager.prototype.broadcastQueries = function () {\n if (this.onBroadcast)\n this.onBroadcast();\n this.queries.forEach(function (info) { return info.notify(); });\n };\n QueryManager.prototype.getLocalState = function () {\n return this.localState;\n };\n QueryManager.prototype.getObservableFromLink = function (query, context, variables, extensions, \n // Prefer context.queryDeduplication if specified.\n deduplication) {\n var _this = this;\n var _a;\n if (deduplication === void 0) { deduplication = (_a = context === null || context === void 0 ? void 0 : context.queryDeduplication) !== null && _a !== void 0 ? _a : this.queryDeduplication; }\n var observable;\n var _b = this.getDocumentInfo(query), serverQuery = _b.serverQuery, clientQuery = _b.clientQuery;\n if (serverQuery) {\n var _c = this, inFlightLinkObservables_1 = _c.inFlightLinkObservables, link = _c.link;\n var operation = {\n query: serverQuery,\n variables: variables,\n operationName: getOperationName(serverQuery) || void 0,\n context: this.prepareContext(__assign(__assign({}, context), { forceFetch: !deduplication })),\n extensions: extensions,\n };\n context = operation.context;\n if (deduplication) {\n var printedServerQuery_1 = print(serverQuery);\n var varJson_1 = canonicalStringify(variables);\n var entry = inFlightLinkObservables_1.lookup(printedServerQuery_1, varJson_1);\n observable = entry.observable;\n if (!observable) {\n var concast = new Concast([\n execute(link, operation),\n ]);\n observable = entry.observable = concast;\n concast.beforeNext(function () {\n inFlightLinkObservables_1.remove(printedServerQuery_1, varJson_1);\n });\n }\n }\n else {\n observable = new Concast([\n execute(link, operation),\n ]);\n }\n }\n else {\n observable = new Concast([Observable.of({ data: {} })]);\n context = this.prepareContext(context);\n }\n if (clientQuery) {\n observable = asyncMap(observable, function (result) {\n return _this.localState.runResolvers({\n document: clientQuery,\n remoteResult: result,\n context: context,\n variables: variables,\n });\n });\n }\n return observable;\n };\n QueryManager.prototype.getResultsFromLink = function (queryInfo, cacheWriteBehavior, options) {\n var requestId = (queryInfo.lastRequestId = this.generateRequestId());\n // Performing transformForLink here gives this.cache a chance to fill in\n // missing fragment definitions (for example) before sending this document\n // through the link chain.\n var linkDocument = this.cache.transformForLink(options.query);\n return asyncMap(this.getObservableFromLink(linkDocument, options.context, options.variables), function (result) {\n var graphQLErrors = getGraphQLErrorsFromResult(result);\n var hasErrors = graphQLErrors.length > 0;\n var errorPolicy = options.errorPolicy;\n // If we interrupted this request by calling getResultsFromLink again\n // with the same QueryInfo object, we ignore the old results.\n if (requestId >= queryInfo.lastRequestId) {\n if (hasErrors && errorPolicy === \"none\") {\n // Throwing here effectively calls observer.error.\n throw queryInfo.markError(new ApolloError({\n graphQLErrors: graphQLErrors,\n }));\n }\n // Use linkDocument rather than queryInfo.document so the\n // operation/fragments used to write the result are the same as the\n // ones used to obtain it from the link.\n queryInfo.markResult(result, linkDocument, options, cacheWriteBehavior);\n queryInfo.markReady();\n }\n var aqr = {\n data: result.data,\n loading: false,\n networkStatus: NetworkStatus.ready,\n };\n // In the case we start multiple network requests simulatenously, we\n // want to ensure we properly set `data` if we're reporting on an old\n // result which will not be caught by the conditional above that ends up\n // throwing the markError result.\n if (hasErrors && errorPolicy === \"none\") {\n aqr.data = void 0;\n }\n if (hasErrors && errorPolicy !== \"ignore\") {\n aqr.errors = graphQLErrors;\n aqr.networkStatus = NetworkStatus.error;\n }\n return aqr;\n }, function (networkError) {\n var error = isApolloError(networkError) ? networkError : (new ApolloError({ networkError: networkError }));\n // Avoid storing errors from older interrupted queries.\n if (requestId >= queryInfo.lastRequestId) {\n queryInfo.markError(error);\n }\n throw error;\n });\n };\n QueryManager.prototype.fetchConcastWithInfo = function (queryId, options, \n // The initial networkStatus for this fetch, most often\n // NetworkStatus.loading, but also possibly fetchMore, poll, refetch,\n // or setVariables.\n networkStatus, query) {\n var _this = this;\n if (networkStatus === void 0) { networkStatus = NetworkStatus.loading; }\n if (query === void 0) { query = options.query; }\n var variables = this.getVariables(query, options.variables);\n var queryInfo = this.getQuery(queryId);\n var defaults = this.defaultOptions.watchQuery;\n var _a = options.fetchPolicy, fetchPolicy = _a === void 0 ? (defaults && defaults.fetchPolicy) || \"cache-first\" : _a, _b = options.errorPolicy, errorPolicy = _b === void 0 ? (defaults && defaults.errorPolicy) || \"none\" : _b, _c = options.returnPartialData, returnPartialData = _c === void 0 ? false : _c, _d = options.notifyOnNetworkStatusChange, notifyOnNetworkStatusChange = _d === void 0 ? false : _d, _e = options.context, context = _e === void 0 ? {} : _e;\n var normalized = Object.assign({}, options, {\n query: query,\n variables: variables,\n fetchPolicy: fetchPolicy,\n errorPolicy: errorPolicy,\n returnPartialData: returnPartialData,\n notifyOnNetworkStatusChange: notifyOnNetworkStatusChange,\n context: context,\n });\n var fromVariables = function (variables) {\n // Since normalized is always a fresh copy of options, it's safe to\n // modify its properties here, rather than creating yet another new\n // WatchQueryOptions object.\n normalized.variables = variables;\n var sourcesWithInfo = _this.fetchQueryByPolicy(queryInfo, normalized, networkStatus);\n if (\n // If we're in standby, postpone advancing options.fetchPolicy using\n // applyNextFetchPolicy.\n normalized.fetchPolicy !== \"standby\" &&\n // The \"standby\" policy currently returns [] from fetchQueryByPolicy, so\n // this is another way to detect when nothing was done/fetched.\n sourcesWithInfo.sources.length > 0 &&\n queryInfo.observableQuery) {\n queryInfo.observableQuery[\"applyNextFetchPolicy\"](\"after-fetch\", options);\n }\n return sourcesWithInfo;\n };\n // This cancel function needs to be set before the concast is created,\n // in case concast creation synchronously cancels the request.\n var cleanupCancelFn = function () { return _this.fetchCancelFns.delete(queryId); };\n this.fetchCancelFns.set(queryId, function (reason) {\n cleanupCancelFn();\n // This delay ensures the concast variable has been initialized.\n setTimeout(function () { return concast.cancel(reason); });\n });\n var concast, containsDataFromLink;\n // If the query has @export(as: ...) directives, then we need to\n // process those directives asynchronously. When there are no\n // @export directives (the common case), we deliberately avoid\n // wrapping the result of this.fetchQueryByPolicy in a Promise,\n // since the timing of result delivery is (unfortunately) important\n // for backwards compatibility. TODO This code could be simpler if\n // we deprecated and removed LocalState.\n if (this.getDocumentInfo(normalized.query).hasClientExports) {\n concast = new Concast(this.localState\n .addExportedVariables(normalized.query, normalized.variables, normalized.context)\n .then(fromVariables)\n .then(function (sourcesWithInfo) { return sourcesWithInfo.sources; }));\n // there is just no way we can synchronously get the *right* value here,\n // so we will assume `true`, which is the behaviour before the bug fix in\n // #10597. This means that bug is not fixed in that case, and is probably\n // un-fixable with reasonable effort for the edge case of @export as\n // directives.\n containsDataFromLink = true;\n }\n else {\n var sourcesWithInfo = fromVariables(normalized.variables);\n containsDataFromLink = sourcesWithInfo.fromLink;\n concast = new Concast(sourcesWithInfo.sources);\n }\n concast.promise.then(cleanupCancelFn, cleanupCancelFn);\n return {\n concast: concast,\n fromLink: containsDataFromLink,\n };\n };\n QueryManager.prototype.refetchQueries = function (_a) {\n var _this = this;\n var updateCache = _a.updateCache, include = _a.include, _b = _a.optimistic, optimistic = _b === void 0 ? false : _b, _c = _a.removeOptimistic, removeOptimistic = _c === void 0 ? optimistic ? makeUniqueId(\"refetchQueries\") : void 0 : _c, onQueryUpdated = _a.onQueryUpdated;\n var includedQueriesById = new Map();\n if (include) {\n this.getObservableQueries(include).forEach(function (oq, queryId) {\n includedQueriesById.set(queryId, {\n oq: oq,\n lastDiff: _this.getQuery(queryId).getDiff(),\n });\n });\n }\n var results = new Map();\n if (updateCache) {\n this.cache.batch({\n update: updateCache,\n // Since you can perform any combination of cache reads and/or writes in\n // the cache.batch update function, its optimistic option can be either\n // a boolean or a string, representing three distinct modes of\n // operation:\n //\n // * false: read/write only the root layer\n // * true: read/write the topmost layer\n // * string: read/write a fresh optimistic layer with that ID string\n //\n // When typeof optimistic === \"string\", a new optimistic layer will be\n // temporarily created within cache.batch with that string as its ID. If\n // we then pass that same string as the removeOptimistic option, we can\n // make cache.batch immediately remove the optimistic layer after\n // running the updateCache function, triggering only one broadcast.\n //\n // However, the refetchQueries method accepts only true or false for its\n // optimistic option (not string). We interpret true to mean a temporary\n // optimistic layer should be created, to allow efficiently rolling back\n // the effect of the updateCache function, which involves passing a\n // string instead of true as the optimistic option to cache.batch, when\n // refetchQueries receives optimistic: true.\n //\n // In other words, we are deliberately not supporting the use case of\n // writing to an *existing* optimistic layer (using the refetchQueries\n // updateCache function), since that would potentially interfere with\n // other optimistic updates in progress. Instead, you can read/write\n // only the root layer by passing optimistic: false to refetchQueries,\n // or you can read/write a brand new optimistic layer that will be\n // automatically removed by passing optimistic: true.\n optimistic: (optimistic && removeOptimistic) || false,\n // The removeOptimistic option can also be provided by itself, even if\n // optimistic === false, to remove some previously-added optimistic\n // layer safely and efficiently, like we do in markMutationResult.\n //\n // If an explicit removeOptimistic string is provided with optimistic:\n // true, the removeOptimistic string will determine the ID of the\n // temporary optimistic layer, in case that ever matters.\n removeOptimistic: removeOptimistic,\n onWatchUpdated: function (watch, diff, lastDiff) {\n var oq = watch.watcher instanceof QueryInfo && watch.watcher.observableQuery;\n if (oq) {\n if (onQueryUpdated) {\n // Since we're about to handle this query now, remove it from\n // includedQueriesById, in case it was added earlier because of\n // options.include.\n includedQueriesById.delete(oq.queryId);\n var result = onQueryUpdated(oq, diff, lastDiff);\n if (result === true) {\n // The onQueryUpdated function requested the default refetching\n // behavior by returning true.\n result = oq.refetch();\n }\n // Record the result in the results Map, as long as onQueryUpdated\n // did not return false to skip/ignore this result.\n if (result !== false) {\n results.set(oq, result);\n }\n // Allow the default cache broadcast to happen, except when\n // onQueryUpdated returns false.\n return result;\n }\n if (onQueryUpdated !== null) {\n // If we don't have an onQueryUpdated function, and onQueryUpdated\n // was not disabled by passing null, make sure this query is\n // \"included\" like any other options.include-specified query.\n includedQueriesById.set(oq.queryId, { oq: oq, lastDiff: lastDiff, diff: diff });\n }\n }\n },\n });\n }\n if (includedQueriesById.size) {\n includedQueriesById.forEach(function (_a, queryId) {\n var oq = _a.oq, lastDiff = _a.lastDiff, diff = _a.diff;\n var result;\n // If onQueryUpdated is provided, we want to use it for all included\n // queries, even the QueryOptions ones.\n if (onQueryUpdated) {\n if (!diff) {\n var info = oq[\"queryInfo\"];\n info.reset(); // Force info.getDiff() to read from cache.\n diff = info.getDiff();\n }\n result = onQueryUpdated(oq, diff, lastDiff);\n }\n // Otherwise, we fall back to refetching.\n if (!onQueryUpdated || result === true) {\n result = oq.refetch();\n }\n if (result !== false) {\n results.set(oq, result);\n }\n if (queryId.indexOf(\"legacyOneTimeQuery\") >= 0) {\n _this.stopQueryNoBroadcast(queryId);\n }\n });\n }\n if (removeOptimistic) {\n // In case no updateCache callback was provided (so cache.batch was not\n // called above, and thus did not already remove the optimistic layer),\n // remove it here. Since this is a no-op when the layer has already been\n // removed, we do it even if we called cache.batch above, since it's\n // possible this.cache is an instance of some ApolloCache subclass other\n // than InMemoryCache, and does not fully support the removeOptimistic\n // option for cache.batch.\n this.cache.removeOptimistic(removeOptimistic);\n }\n return results;\n };\n QueryManager.prototype.fetchQueryByPolicy = function (queryInfo, _a, \n // The initial networkStatus for this fetch, most often\n // NetworkStatus.loading, but also possibly fetchMore, poll, refetch,\n // or setVariables.\n networkStatus) {\n var _this = this;\n var query = _a.query, variables = _a.variables, fetchPolicy = _a.fetchPolicy, refetchWritePolicy = _a.refetchWritePolicy, errorPolicy = _a.errorPolicy, returnPartialData = _a.returnPartialData, context = _a.context, notifyOnNetworkStatusChange = _a.notifyOnNetworkStatusChange;\n var oldNetworkStatus = queryInfo.networkStatus;\n queryInfo.init({\n document: query,\n variables: variables,\n networkStatus: networkStatus,\n });\n var readCache = function () { return queryInfo.getDiff(); };\n var resultsFromCache = function (diff, networkStatus) {\n if (networkStatus === void 0) { networkStatus = queryInfo.networkStatus || NetworkStatus.loading; }\n var data = diff.result;\n if (globalThis.__DEV__ !== false && !returnPartialData && !equal(data, {})) {\n logMissingFieldErrors(diff.missing);\n }\n var fromData = function (data) {\n return Observable.of(__assign({ data: data, loading: isNetworkRequestInFlight(networkStatus), networkStatus: networkStatus }, (diff.complete ? null : { partial: true })));\n };\n if (data && _this.getDocumentInfo(query).hasForcedResolvers) {\n return _this.localState\n .runResolvers({\n document: query,\n remoteResult: { data: data },\n context: context,\n variables: variables,\n onlyRunForcedResolvers: true,\n })\n .then(function (resolved) { return fromData(resolved.data || void 0); });\n }\n // Resolves https://github.com/apollographql/apollo-client/issues/10317.\n // If errorPolicy is 'none' and notifyOnNetworkStatusChange is true,\n // data was incorrectly returned from the cache on refetch:\n // if diff.missing exists, we should not return cache data.\n if (errorPolicy === \"none\" &&\n networkStatus === NetworkStatus.refetch &&\n Array.isArray(diff.missing)) {\n return fromData(void 0);\n }\n return fromData(data);\n };\n var cacheWriteBehavior = fetchPolicy === \"no-cache\" ? 0 /* CacheWriteBehavior.FORBID */\n // Watched queries must opt into overwriting existing data on refetch,\n // by passing refetchWritePolicy: \"overwrite\" in their WatchQueryOptions.\n : (networkStatus === NetworkStatus.refetch &&\n refetchWritePolicy !== \"merge\") ?\n 1 /* CacheWriteBehavior.OVERWRITE */\n : 2 /* CacheWriteBehavior.MERGE */;\n var resultsFromLink = function () {\n return _this.getResultsFromLink(queryInfo, cacheWriteBehavior, {\n query: query,\n variables: variables,\n context: context,\n fetchPolicy: fetchPolicy,\n errorPolicy: errorPolicy,\n });\n };\n var shouldNotify = notifyOnNetworkStatusChange &&\n typeof oldNetworkStatus === \"number\" &&\n oldNetworkStatus !== networkStatus &&\n isNetworkRequestInFlight(networkStatus);\n switch (fetchPolicy) {\n default:\n case \"cache-first\": {\n var diff = readCache();\n if (diff.complete) {\n return {\n fromLink: false,\n sources: [resultsFromCache(diff, queryInfo.markReady())],\n };\n }\n if (returnPartialData || shouldNotify) {\n return {\n fromLink: true,\n sources: [resultsFromCache(diff), resultsFromLink()],\n };\n }\n return { fromLink: true, sources: [resultsFromLink()] };\n }\n case \"cache-and-network\": {\n var diff = readCache();\n if (diff.complete || returnPartialData || shouldNotify) {\n return {\n fromLink: true,\n sources: [resultsFromCache(diff), resultsFromLink()],\n };\n }\n return { fromLink: true, sources: [resultsFromLink()] };\n }\n case \"cache-only\":\n return {\n fromLink: false,\n sources: [resultsFromCache(readCache(), queryInfo.markReady())],\n };\n case \"network-only\":\n if (shouldNotify) {\n return {\n fromLink: true,\n sources: [resultsFromCache(readCache()), resultsFromLink()],\n };\n }\n return { fromLink: true, sources: [resultsFromLink()] };\n case \"no-cache\":\n if (shouldNotify) {\n return {\n fromLink: true,\n // Note that queryInfo.getDiff() for no-cache queries does not call\n // cache.diff, but instead returns a { complete: false } stub result\n // when there is no queryInfo.diff already defined.\n sources: [resultsFromCache(queryInfo.getDiff()), resultsFromLink()],\n };\n }\n return { fromLink: true, sources: [resultsFromLink()] };\n case \"standby\":\n return { fromLink: false, sources: [] };\n }\n };\n QueryManager.prototype.getQuery = function (queryId) {\n if (queryId && !this.queries.has(queryId)) {\n this.queries.set(queryId, new QueryInfo(this, queryId));\n }\n return this.queries.get(queryId);\n };\n QueryManager.prototype.prepareContext = function (context) {\n if (context === void 0) { context = {}; }\n var newContext = this.localState.prepareContext(context);\n return __assign(__assign(__assign({}, this.defaultContext), newContext), { clientAwareness: this.clientAwareness });\n };\n return QueryManager;\n}());\nexport { QueryManager };\n//# sourceMappingURL=QueryManager.js.map","import { Kind } from './kinds.mjs';\nexport function isDefinitionNode(node) {\n return (\n isExecutableDefinitionNode(node) ||\n isTypeSystemDefinitionNode(node) ||\n isTypeSystemExtensionNode(node)\n );\n}\nexport function isExecutableDefinitionNode(node) {\n return (\n node.kind === Kind.OPERATION_DEFINITION ||\n node.kind === Kind.FRAGMENT_DEFINITION\n );\n}\nexport function isSelectionNode(node) {\n return (\n node.kind === Kind.FIELD ||\n node.kind === Kind.FRAGMENT_SPREAD ||\n node.kind === Kind.INLINE_FRAGMENT\n );\n}\nexport function isValueNode(node) {\n return (\n node.kind === Kind.VARIABLE ||\n node.kind === Kind.INT ||\n node.kind === Kind.FLOAT ||\n node.kind === Kind.STRING ||\n node.kind === Kind.BOOLEAN ||\n node.kind === Kind.NULL ||\n node.kind === Kind.ENUM ||\n node.kind === Kind.LIST ||\n node.kind === Kind.OBJECT\n );\n}\nexport function isConstValueNode(node) {\n return (\n isValueNode(node) &&\n (node.kind === Kind.LIST\n ? node.values.some(isConstValueNode)\n : node.kind === Kind.OBJECT\n ? node.fields.some((field) => isConstValueNode(field.value))\n : node.kind !== Kind.VARIABLE)\n );\n}\nexport function isTypeNode(node) {\n return (\n node.kind === Kind.NAMED_TYPE ||\n node.kind === Kind.LIST_TYPE ||\n node.kind === Kind.NON_NULL_TYPE\n );\n}\nexport function isTypeSystemDefinitionNode(node) {\n return (\n node.kind === Kind.SCHEMA_DEFINITION ||\n isTypeDefinitionNode(node) ||\n node.kind === Kind.DIRECTIVE_DEFINITION\n );\n}\nexport function isTypeDefinitionNode(node) {\n return (\n node.kind === Kind.SCALAR_TYPE_DEFINITION ||\n node.kind === Kind.OBJECT_TYPE_DEFINITION ||\n node.kind === Kind.INTERFACE_TYPE_DEFINITION ||\n node.kind === Kind.UNION_TYPE_DEFINITION ||\n node.kind === Kind.ENUM_TYPE_DEFINITION ||\n node.kind === Kind.INPUT_OBJECT_TYPE_DEFINITION\n );\n}\nexport function isTypeSystemExtensionNode(node) {\n return node.kind === Kind.SCHEMA_EXTENSION || isTypeExtensionNode(node);\n}\nexport function isTypeExtensionNode(node) {\n return (\n node.kind === Kind.SCALAR_TYPE_EXTENSION ||\n node.kind === Kind.OBJECT_TYPE_EXTENSION ||\n node.kind === Kind.INTERFACE_TYPE_EXTENSION ||\n node.kind === Kind.UNION_TYPE_EXTENSION ||\n node.kind === Kind.ENUM_TYPE_EXTENSION ||\n node.kind === Kind.INPUT_OBJECT_TYPE_EXTENSION\n );\n}\n","import { dep, Slot } from \"optimism\";\n// Contextual Slot that acquires its value when custom read functions are\n// called in Policies#readField.\nexport var cacheSlot = new Slot();\nvar cacheInfoMap = new WeakMap();\nfunction getCacheInfo(cache) {\n var info = cacheInfoMap.get(cache);\n if (!info) {\n cacheInfoMap.set(cache, (info = {\n vars: new Set(),\n dep: dep(),\n }));\n }\n return info;\n}\nexport function forgetCache(cache) {\n getCacheInfo(cache).vars.forEach(function (rv) { return rv.forgetCache(cache); });\n}\n// Calling forgetCache(cache) serves to silence broadcasts and allows the\n// cache to be garbage collected. However, the varsByCache WeakMap\n// preserves the set of reactive variables that were previously associated\n// with this cache, which makes it possible to \"recall\" the cache at a\n// later time, by reattaching it to those variables. If the cache has been\n// garbage collected in the meantime, because it is no longer reachable,\n// you won't be able to call recallCache(cache), and the cache will\n// automatically disappear from the varsByCache WeakMap.\nexport function recallCache(cache) {\n getCacheInfo(cache).vars.forEach(function (rv) { return rv.attachCache(cache); });\n}\nexport function makeVar(value) {\n var caches = new Set();\n var listeners = new Set();\n var rv = function (newValue) {\n if (arguments.length > 0) {\n if (value !== newValue) {\n value = newValue;\n caches.forEach(function (cache) {\n // Invalidate any fields with custom read functions that\n // consumed this variable, so query results involving those\n // fields will be recomputed the next time we read them.\n getCacheInfo(cache).dep.dirty(rv);\n // Broadcast changes to any caches that have previously read\n // from this variable.\n broadcast(cache);\n });\n // Finally, notify any listeners added via rv.onNextChange.\n var oldListeners = Array.from(listeners);\n listeners.clear();\n oldListeners.forEach(function (listener) { return listener(value); });\n }\n }\n else {\n // When reading from the variable, obtain the current cache from\n // context via cacheSlot. This isn't entirely foolproof, but it's\n // the same system that powers varDep.\n var cache = cacheSlot.getValue();\n if (cache) {\n attach(cache);\n getCacheInfo(cache).dep(rv);\n }\n }\n return value;\n };\n rv.onNextChange = function (listener) {\n listeners.add(listener);\n return function () {\n listeners.delete(listener);\n };\n };\n var attach = (rv.attachCache = function (cache) {\n caches.add(cache);\n getCacheInfo(cache).vars.add(rv);\n return rv;\n });\n rv.forgetCache = function (cache) { return caches.delete(cache); };\n return rv;\n}\nfunction broadcast(cache) {\n if (cache.broadcastWatches) {\n cache.broadcastWatches();\n }\n}\n//# sourceMappingURL=reactiveVars.js.map","import { __assign, __awaiter, __generator } from \"tslib\";\nimport { invariant } from \"../utilities/globals/index.js\";\nimport { visit, BREAK, isSelectionNode } from \"graphql\";\nimport { argumentsObjectFromField, buildQueryFromSelectionSet, createFragmentMap, getFragmentDefinitions, getMainDefinition, hasDirectives, isField, isInlineFragment, mergeDeep, mergeDeepArray, removeClientSetsFromDocument, resultKeyNameFromField, shouldInclude, } from \"../utilities/index.js\";\nimport { cacheSlot } from \"../cache/index.js\";\nvar LocalState = /** @class */ (function () {\n function LocalState(_a) {\n var cache = _a.cache, client = _a.client, resolvers = _a.resolvers, fragmentMatcher = _a.fragmentMatcher;\n this.selectionsToResolveCache = new WeakMap();\n this.cache = cache;\n if (client) {\n this.client = client;\n }\n if (resolvers) {\n this.addResolvers(resolvers);\n }\n if (fragmentMatcher) {\n this.setFragmentMatcher(fragmentMatcher);\n }\n }\n LocalState.prototype.addResolvers = function (resolvers) {\n var _this = this;\n this.resolvers = this.resolvers || {};\n if (Array.isArray(resolvers)) {\n resolvers.forEach(function (resolverGroup) {\n _this.resolvers = mergeDeep(_this.resolvers, resolverGroup);\n });\n }\n else {\n this.resolvers = mergeDeep(this.resolvers, resolvers);\n }\n };\n LocalState.prototype.setResolvers = function (resolvers) {\n this.resolvers = {};\n this.addResolvers(resolvers);\n };\n LocalState.prototype.getResolvers = function () {\n return this.resolvers || {};\n };\n // Run local client resolvers against the incoming query and remote data.\n // Locally resolved field values are merged with the incoming remote data,\n // and returned. Note that locally resolved fields will overwrite\n // remote data using the same field name.\n LocalState.prototype.runResolvers = function (_a) {\n return __awaiter(this, arguments, void 0, function (_b) {\n var document = _b.document, remoteResult = _b.remoteResult, context = _b.context, variables = _b.variables, _c = _b.onlyRunForcedResolvers, onlyRunForcedResolvers = _c === void 0 ? false : _c;\n return __generator(this, function (_d) {\n if (document) {\n return [2 /*return*/, this.resolveDocument(document, remoteResult.data, context, variables, this.fragmentMatcher, onlyRunForcedResolvers).then(function (localResult) { return (__assign(__assign({}, remoteResult), { data: localResult.result })); })];\n }\n return [2 /*return*/, remoteResult];\n });\n });\n };\n LocalState.prototype.setFragmentMatcher = function (fragmentMatcher) {\n this.fragmentMatcher = fragmentMatcher;\n };\n LocalState.prototype.getFragmentMatcher = function () {\n return this.fragmentMatcher;\n };\n // Client queries contain everything in the incoming document (if a @client\n // directive is found).\n LocalState.prototype.clientQuery = function (document) {\n if (hasDirectives([\"client\"], document)) {\n if (this.resolvers) {\n return document;\n }\n }\n return null;\n };\n // Server queries are stripped of all @client based selection sets.\n LocalState.prototype.serverQuery = function (document) {\n return removeClientSetsFromDocument(document);\n };\n LocalState.prototype.prepareContext = function (context) {\n var cache = this.cache;\n return __assign(__assign({}, context), { cache: cache, \n // Getting an entry's cache key is useful for local state resolvers.\n getCacheKey: function (obj) {\n return cache.identify(obj);\n } });\n };\n // To support `@client @export(as: \"someVar\")` syntax, we'll first resolve\n // @client @export fields locally, then pass the resolved values back to be\n // used alongside the original operation variables.\n LocalState.prototype.addExportedVariables = function (document_1) {\n return __awaiter(this, arguments, void 0, function (document, variables, context) {\n if (variables === void 0) { variables = {}; }\n if (context === void 0) { context = {}; }\n return __generator(this, function (_a) {\n if (document) {\n return [2 /*return*/, this.resolveDocument(document, this.buildRootValueFromCache(document, variables) || {}, this.prepareContext(context), variables).then(function (data) { return (__assign(__assign({}, variables), data.exportedVariables)); })];\n }\n return [2 /*return*/, __assign({}, variables)];\n });\n });\n };\n LocalState.prototype.shouldForceResolvers = function (document) {\n var forceResolvers = false;\n visit(document, {\n Directive: {\n enter: function (node) {\n if (node.name.value === \"client\" && node.arguments) {\n forceResolvers = node.arguments.some(function (arg) {\n return arg.name.value === \"always\" &&\n arg.value.kind === \"BooleanValue\" &&\n arg.value.value === true;\n });\n if (forceResolvers) {\n return BREAK;\n }\n }\n },\n },\n });\n return forceResolvers;\n };\n // Query the cache and return matching data.\n LocalState.prototype.buildRootValueFromCache = function (document, variables) {\n return this.cache.diff({\n query: buildQueryFromSelectionSet(document),\n variables: variables,\n returnPartialData: true,\n optimistic: false,\n }).result;\n };\n LocalState.prototype.resolveDocument = function (document_1, rootValue_1) {\n return __awaiter(this, arguments, void 0, function (document, rootValue, context, variables, fragmentMatcher, onlyRunForcedResolvers) {\n var mainDefinition, fragments, fragmentMap, selectionsToResolve, definitionOperation, defaultOperationType, _a, cache, client, execContext, isClientFieldDescendant;\n if (context === void 0) { context = {}; }\n if (variables === void 0) { variables = {}; }\n if (fragmentMatcher === void 0) { fragmentMatcher = function () { return true; }; }\n if (onlyRunForcedResolvers === void 0) { onlyRunForcedResolvers = false; }\n return __generator(this, function (_b) {\n mainDefinition = getMainDefinition(document);\n fragments = getFragmentDefinitions(document);\n fragmentMap = createFragmentMap(fragments);\n selectionsToResolve = this.collectSelectionsToResolve(mainDefinition, fragmentMap);\n definitionOperation = mainDefinition.operation;\n defaultOperationType = definitionOperation ?\n definitionOperation.charAt(0).toUpperCase() +\n definitionOperation.slice(1)\n : \"Query\";\n _a = this, cache = _a.cache, client = _a.client;\n execContext = {\n fragmentMap: fragmentMap,\n context: __assign(__assign({}, context), { cache: cache, client: client }),\n variables: variables,\n fragmentMatcher: fragmentMatcher,\n defaultOperationType: defaultOperationType,\n exportedVariables: {},\n selectionsToResolve: selectionsToResolve,\n onlyRunForcedResolvers: onlyRunForcedResolvers,\n };\n isClientFieldDescendant = false;\n return [2 /*return*/, this.resolveSelectionSet(mainDefinition.selectionSet, isClientFieldDescendant, rootValue, execContext).then(function (result) { return ({\n result: result,\n exportedVariables: execContext.exportedVariables,\n }); })];\n });\n });\n };\n LocalState.prototype.resolveSelectionSet = function (selectionSet, isClientFieldDescendant, rootValue, execContext) {\n return __awaiter(this, void 0, void 0, function () {\n var fragmentMap, context, variables, resultsToMerge, execute;\n var _this = this;\n return __generator(this, function (_a) {\n fragmentMap = execContext.fragmentMap, context = execContext.context, variables = execContext.variables;\n resultsToMerge = [rootValue];\n execute = function (selection) { return __awaiter(_this, void 0, void 0, function () {\n var fragment, typeCondition;\n return __generator(this, function (_a) {\n if (!isClientFieldDescendant &&\n !execContext.selectionsToResolve.has(selection)) {\n // Skip selections without @client directives\n // (still processing if one of the ancestors or one of the child fields has @client directive)\n return [2 /*return*/];\n }\n if (!shouldInclude(selection, variables)) {\n // Skip this entirely.\n return [2 /*return*/];\n }\n if (isField(selection)) {\n return [2 /*return*/, this.resolveField(selection, isClientFieldDescendant, rootValue, execContext).then(function (fieldResult) {\n var _a;\n if (typeof fieldResult !== \"undefined\") {\n resultsToMerge.push((_a = {},\n _a[resultKeyNameFromField(selection)] = fieldResult,\n _a));\n }\n })];\n }\n if (isInlineFragment(selection)) {\n fragment = selection;\n }\n else {\n // This is a named fragment.\n fragment = fragmentMap[selection.name.value];\n invariant(fragment, 18, selection.name.value);\n }\n if (fragment && fragment.typeCondition) {\n typeCondition = fragment.typeCondition.name.value;\n if (execContext.fragmentMatcher(rootValue, typeCondition, context)) {\n return [2 /*return*/, this.resolveSelectionSet(fragment.selectionSet, isClientFieldDescendant, rootValue, execContext).then(function (fragmentResult) {\n resultsToMerge.push(fragmentResult);\n })];\n }\n }\n return [2 /*return*/];\n });\n }); };\n return [2 /*return*/, Promise.all(selectionSet.selections.map(execute)).then(function () {\n return mergeDeepArray(resultsToMerge);\n })];\n });\n });\n };\n LocalState.prototype.resolveField = function (field, isClientFieldDescendant, rootValue, execContext) {\n return __awaiter(this, void 0, void 0, function () {\n var variables, fieldName, aliasedFieldName, aliasUsed, defaultResult, resultPromise, resolverType, resolverMap, resolve;\n var _this = this;\n return __generator(this, function (_a) {\n if (!rootValue) {\n return [2 /*return*/, null];\n }\n variables = execContext.variables;\n fieldName = field.name.value;\n aliasedFieldName = resultKeyNameFromField(field);\n aliasUsed = fieldName !== aliasedFieldName;\n defaultResult = rootValue[aliasedFieldName] || rootValue[fieldName];\n resultPromise = Promise.resolve(defaultResult);\n // Usually all local resolvers are run when passing through here, but\n // if we've specifically identified that we only want to run forced\n // resolvers (that is, resolvers for fields marked with\n // `@client(always: true)`), then we'll skip running non-forced resolvers.\n if (!execContext.onlyRunForcedResolvers ||\n this.shouldForceResolvers(field)) {\n resolverType = rootValue.__typename || execContext.defaultOperationType;\n resolverMap = this.resolvers && this.resolvers[resolverType];\n if (resolverMap) {\n resolve = resolverMap[aliasUsed ? fieldName : aliasedFieldName];\n if (resolve) {\n resultPromise = Promise.resolve(\n // In case the resolve function accesses reactive variables,\n // set cacheSlot to the current cache instance.\n cacheSlot.withValue(this.cache, resolve, [\n rootValue,\n argumentsObjectFromField(field, variables),\n execContext.context,\n { field: field, fragmentMap: execContext.fragmentMap },\n ]));\n }\n }\n }\n return [2 /*return*/, resultPromise.then(function (result) {\n var _a, _b;\n if (result === void 0) { result = defaultResult; }\n // If an @export directive is associated with the current field, store\n // the `as` export variable name and current result for later use.\n if (field.directives) {\n field.directives.forEach(function (directive) {\n if (directive.name.value === \"export\" && directive.arguments) {\n directive.arguments.forEach(function (arg) {\n if (arg.name.value === \"as\" && arg.value.kind === \"StringValue\") {\n execContext.exportedVariables[arg.value.value] = result;\n }\n });\n }\n });\n }\n // Handle all scalar types here.\n if (!field.selectionSet) {\n return result;\n }\n // From here down, the field has a selection set, which means it's trying\n // to query a GraphQLObjectType.\n if (result == null) {\n // Basically any field in a GraphQL response can be null, or missing\n return result;\n }\n var isClientField = (_b = (_a = field.directives) === null || _a === void 0 ? void 0 : _a.some(function (d) { return d.name.value === \"client\"; })) !== null && _b !== void 0 ? _b : false;\n if (Array.isArray(result)) {\n return _this.resolveSubSelectedArray(field, isClientFieldDescendant || isClientField, result, execContext);\n }\n // Returned value is an object, and the query has a sub-selection. Recurse.\n if (field.selectionSet) {\n return _this.resolveSelectionSet(field.selectionSet, isClientFieldDescendant || isClientField, result, execContext);\n }\n })];\n });\n });\n };\n LocalState.prototype.resolveSubSelectedArray = function (field, isClientFieldDescendant, result, execContext) {\n var _this = this;\n return Promise.all(result.map(function (item) {\n if (item === null) {\n return null;\n }\n // This is a nested array, recurse.\n if (Array.isArray(item)) {\n return _this.resolveSubSelectedArray(field, isClientFieldDescendant, item, execContext);\n }\n // This is an object, run the selection set on it.\n if (field.selectionSet) {\n return _this.resolveSelectionSet(field.selectionSet, isClientFieldDescendant, item, execContext);\n }\n }));\n };\n // Collect selection nodes on paths from document root down to all @client directives.\n // This function takes into account transitive fragment spreads.\n // Complexity equals to a single `visit` over the full document.\n LocalState.prototype.collectSelectionsToResolve = function (mainDefinition, fragmentMap) {\n var isSingleASTNode = function (node) { return !Array.isArray(node); };\n var selectionsToResolveCache = this.selectionsToResolveCache;\n function collectByDefinition(definitionNode) {\n if (!selectionsToResolveCache.has(definitionNode)) {\n var matches_1 = new Set();\n selectionsToResolveCache.set(definitionNode, matches_1);\n visit(definitionNode, {\n Directive: function (node, _, __, ___, ancestors) {\n if (node.name.value === \"client\") {\n ancestors.forEach(function (node) {\n if (isSingleASTNode(node) && isSelectionNode(node)) {\n matches_1.add(node);\n }\n });\n }\n },\n FragmentSpread: function (spread, _, __, ___, ancestors) {\n var fragment = fragmentMap[spread.name.value];\n invariant(fragment, 19, spread.name.value);\n var fragmentSelections = collectByDefinition(fragment);\n if (fragmentSelections.size > 0) {\n // Fragment for this spread contains @client directive (either directly or transitively)\n // Collect selection nodes on paths from the root down to fields with the @client directive\n ancestors.forEach(function (node) {\n if (isSingleASTNode(node) && isSelectionNode(node)) {\n matches_1.add(node);\n }\n });\n matches_1.add(spread);\n fragmentSelections.forEach(function (selection) {\n matches_1.add(selection);\n });\n }\n },\n });\n }\n return selectionsToResolveCache.get(definitionNode);\n }\n return collectByDefinition(mainDefinition);\n };\n return LocalState;\n}());\nexport { LocalState };\n//# sourceMappingURL=LocalState.js.map","import { __assign } from \"tslib\";\nimport { invariant, newInvariantError } from \"../utilities/globals/index.js\";\nimport { ApolloLink, execute } from \"../link/core/index.js\";\nimport { version } from \"../version.js\";\nimport { HttpLink } from \"../link/http/index.js\";\nimport { QueryManager } from \"./QueryManager.js\";\nimport { LocalState } from \"./LocalState.js\";\nvar hasSuggestedDevtools = false;\n// Though mergeOptions now resides in @apollo/client/utilities, it was\n// previously declared and exported from this module, and then reexported from\n// @apollo/client/core. Since we need to preserve that API anyway, the easiest\n// solution is to reexport mergeOptions where it was previously declared (here).\nimport { mergeOptions } from \"../utilities/index.js\";\nimport { getApolloClientMemoryInternals } from \"../utilities/caching/getMemoryInternals.js\";\nexport { mergeOptions };\n/**\n * This is the primary Apollo Client class. It is used to send GraphQL documents (i.e. queries\n * and mutations) to a GraphQL spec-compliant server over an `ApolloLink` instance,\n * receive results from the server and cache the results in a store. It also delivers updates\n * to GraphQL queries through `Observable` instances.\n */\nvar ApolloClient = /** @class */ (function () {\n /**\n * Constructs an instance of `ApolloClient`.\n *\n * @example\n * ```js\n * import { ApolloClient, InMemoryCache } from '@apollo/client';\n *\n * const cache = new InMemoryCache();\n *\n * const client = new ApolloClient({\n * // Provide required constructor fields\n * cache: cache,\n * uri: 'http://localhost:4000/',\n *\n * // Provide some optional constructor fields\n * name: 'react-web-client',\n * version: '1.3',\n * queryDeduplication: false,\n * defaultOptions: {\n * watchQuery: {\n * fetchPolicy: 'cache-and-network',\n * },\n * },\n * });\n * ```\n */\n function ApolloClient(options) {\n var _this = this;\n this.resetStoreCallbacks = [];\n this.clearStoreCallbacks = [];\n if (!options.cache) {\n throw newInvariantError(15);\n }\n var uri = options.uri, credentials = options.credentials, headers = options.headers, cache = options.cache, documentTransform = options.documentTransform, _a = options.ssrMode, ssrMode = _a === void 0 ? false : _a, _b = options.ssrForceFetchDelay, ssrForceFetchDelay = _b === void 0 ? 0 : _b, \n // Expose the client instance as window.__APOLLO_CLIENT__ and call\n // onBroadcast in queryManager.broadcastQueries to enable browser\n // devtools, but disable them by default in production.\n connectToDevTools = options.connectToDevTools, _c = options.queryDeduplication, queryDeduplication = _c === void 0 ? true : _c, defaultOptions = options.defaultOptions, defaultContext = options.defaultContext, _d = options.assumeImmutableResults, assumeImmutableResults = _d === void 0 ? cache.assumeImmutableResults : _d, resolvers = options.resolvers, typeDefs = options.typeDefs, fragmentMatcher = options.fragmentMatcher, clientAwarenessName = options.name, clientAwarenessVersion = options.version, devtools = options.devtools;\n var link = options.link;\n if (!link) {\n link =\n uri ? new HttpLink({ uri: uri, credentials: credentials, headers: headers }) : ApolloLink.empty();\n }\n this.link = link;\n this.cache = cache;\n this.disableNetworkFetches = ssrMode || ssrForceFetchDelay > 0;\n this.queryDeduplication = queryDeduplication;\n this.defaultOptions = defaultOptions || Object.create(null);\n this.typeDefs = typeDefs;\n this.devtoolsConfig = __assign(__assign({}, devtools), { enabled: (devtools === null || devtools === void 0 ? void 0 : devtools.enabled) || connectToDevTools });\n if (this.devtoolsConfig.enabled === undefined) {\n this.devtoolsConfig.enabled = globalThis.__DEV__ !== false;\n }\n if (ssrForceFetchDelay) {\n setTimeout(function () { return (_this.disableNetworkFetches = false); }, ssrForceFetchDelay);\n }\n this.watchQuery = this.watchQuery.bind(this);\n this.query = this.query.bind(this);\n this.mutate = this.mutate.bind(this);\n this.watchFragment = this.watchFragment.bind(this);\n this.resetStore = this.resetStore.bind(this);\n this.reFetchObservableQueries = this.reFetchObservableQueries.bind(this);\n this.version = version;\n this.localState = new LocalState({\n cache: cache,\n client: this,\n resolvers: resolvers,\n fragmentMatcher: fragmentMatcher,\n });\n this.queryManager = new QueryManager({\n cache: this.cache,\n link: this.link,\n defaultOptions: this.defaultOptions,\n defaultContext: defaultContext,\n documentTransform: documentTransform,\n queryDeduplication: queryDeduplication,\n ssrMode: ssrMode,\n clientAwareness: {\n name: clientAwarenessName,\n version: clientAwarenessVersion,\n },\n localState: this.localState,\n assumeImmutableResults: assumeImmutableResults,\n onBroadcast: this.devtoolsConfig.enabled ?\n function () {\n if (_this.devToolsHookCb) {\n _this.devToolsHookCb({\n action: {},\n state: {\n queries: _this.queryManager.getQueryStore(),\n mutations: _this.queryManager.mutationStore || {},\n },\n dataWithOptimisticResults: _this.cache.extract(true),\n });\n }\n }\n : void 0,\n });\n if (this.devtoolsConfig.enabled)\n this.connectToDevTools();\n }\n ApolloClient.prototype.connectToDevTools = function () {\n if (typeof window === \"undefined\") {\n return;\n }\n var windowWithDevTools = window;\n var devtoolsSymbol = Symbol.for(\"apollo.devtools\");\n (windowWithDevTools[devtoolsSymbol] =\n windowWithDevTools[devtoolsSymbol] || []).push(this);\n windowWithDevTools.__APOLLO_CLIENT__ = this;\n /**\n * Suggest installing the devtools for developers who don't have them\n */\n if (!hasSuggestedDevtools && globalThis.__DEV__ !== false) {\n hasSuggestedDevtools = true;\n if (window.document &&\n window.top === window.self &&\n /^(https?|file):$/.test(window.location.protocol)) {\n setTimeout(function () {\n if (!window.__APOLLO_DEVTOOLS_GLOBAL_HOOK__) {\n var nav = window.navigator;\n var ua = nav && nav.userAgent;\n var url = void 0;\n if (typeof ua === \"string\") {\n if (ua.indexOf(\"Chrome/\") > -1) {\n url =\n \"https://chrome.google.com/webstore/detail/\" +\n \"apollo-client-developer-t/jdkknkkbebbapilgoeccciglkfbmbnfm\";\n }\n else if (ua.indexOf(\"Firefox/\") > -1) {\n url =\n \"https://addons.mozilla.org/en-US/firefox/addon/apollo-developer-tools/\";\n }\n }\n if (url) {\n globalThis.__DEV__ !== false && invariant.log(\"Download the Apollo DevTools for a better development \" +\n \"experience: %s\", url);\n }\n }\n }, 10000);\n }\n }\n };\n Object.defineProperty(ApolloClient.prototype, \"documentTransform\", {\n /**\n * The `DocumentTransform` used to modify GraphQL documents before a request\n * is made. If a custom `DocumentTransform` is not provided, this will be the\n * default document transform.\n */\n get: function () {\n return this.queryManager.documentTransform;\n },\n enumerable: false,\n configurable: true\n });\n /**\n * Call this method to terminate any active client processes, making it safe\n * to dispose of this `ApolloClient` instance.\n */\n ApolloClient.prototype.stop = function () {\n this.queryManager.stop();\n };\n /**\n * This watches the cache store of the query according to the options specified and\n * returns an `ObservableQuery`. We can subscribe to this `ObservableQuery` and\n * receive updated results through an observer when the cache store changes.\n *\n * Note that this method is not an implementation of GraphQL subscriptions. Rather,\n * it uses Apollo's store in order to reactively deliver updates to your query results.\n *\n * For example, suppose you call watchQuery on a GraphQL query that fetches a person's\n * first and last name and this person has a particular object identifier, provided by\n * dataIdFromObject. Later, a different query fetches that same person's\n * first and last name and the first name has now changed. Then, any observers associated\n * with the results of the first query will be updated with a new result object.\n *\n * Note that if the cache does not change, the subscriber will *not* be notified.\n *\n * See [here](https://medium.com/apollo-stack/the-concepts-of-graphql-bc68bd819be3#.3mb0cbcmc) for\n * a description of store reactivity.\n */\n ApolloClient.prototype.watchQuery = function (options) {\n if (this.defaultOptions.watchQuery) {\n options = mergeOptions(this.defaultOptions.watchQuery, options);\n }\n // XXX Overwriting options is probably not the best way to do this long term...\n if (this.disableNetworkFetches &&\n (options.fetchPolicy === \"network-only\" ||\n options.fetchPolicy === \"cache-and-network\")) {\n options = __assign(__assign({}, options), { fetchPolicy: \"cache-first\" });\n }\n return this.queryManager.watchQuery(options);\n };\n /**\n * This resolves a single query according to the options specified and\n * returns a `Promise` which is either resolved with the resulting data\n * or rejected with an error.\n *\n * @param options - An object of type `QueryOptions` that allows us to\n * describe how this query should be treated e.g. whether it should hit the\n * server at all or just resolve from the cache, etc.\n */\n ApolloClient.prototype.query = function (options) {\n if (this.defaultOptions.query) {\n options = mergeOptions(this.defaultOptions.query, options);\n }\n invariant(options.fetchPolicy !== \"cache-and-network\", 16);\n if (this.disableNetworkFetches && options.fetchPolicy === \"network-only\") {\n options = __assign(__assign({}, options), { fetchPolicy: \"cache-first\" });\n }\n return this.queryManager.query(options);\n };\n /**\n * This resolves a single mutation according to the options specified and returns a\n * Promise which is either resolved with the resulting data or rejected with an\n * error. In some cases both `data` and `errors` might be undefined, for example\n * when `errorPolicy` is set to `'ignore'`.\n *\n * It takes options as an object with the following keys and values:\n */\n ApolloClient.prototype.mutate = function (options) {\n if (this.defaultOptions.mutate) {\n options = mergeOptions(this.defaultOptions.mutate, options);\n }\n return this.queryManager.mutate(options);\n };\n /**\n * This subscribes to a graphql subscription according to the options specified and returns an\n * `Observable` which either emits received data or an error.\n */\n ApolloClient.prototype.subscribe = function (options) {\n return this.queryManager.startGraphQLSubscription(options);\n };\n /**\n * Tries to read some data from the store in the shape of the provided\n * GraphQL query without making a network request. This method will start at\n * the root query. To start at a specific id returned by `dataIdFromObject`\n * use `readFragment`.\n *\n * @param optimistic - Set to `true` to allow `readQuery` to return\n * optimistic results. Is `false` by default.\n */\n ApolloClient.prototype.readQuery = function (options, optimistic) {\n if (optimistic === void 0) { optimistic = false; }\n return this.cache.readQuery(options, optimistic);\n };\n /**\n * Watches the cache store of the fragment according to the options specified\n * and returns an `Observable`. We can subscribe to this\n * `Observable` and receive updated results through an\n * observer when the cache store changes.\n *\n * You must pass in a GraphQL document with a single fragment or a document\n * with multiple fragments that represent what you are reading. If you pass\n * in a document with multiple fragments then you must also specify a\n * `fragmentName`.\n *\n * @since 3.10.0\n * @param options - An object of type `WatchFragmentOptions` that allows\n * the cache to identify the fragment and optionally specify whether to react\n * to optimistic updates.\n */\n ApolloClient.prototype.watchFragment = function (options) {\n return this.cache.watchFragment(options);\n };\n /**\n * Tries to read some data from the store in the shape of the provided\n * GraphQL fragment without making a network request. This method will read a\n * GraphQL fragment from any arbitrary id that is currently cached, unlike\n * `readQuery` which will only read from the root query.\n *\n * You must pass in a GraphQL document with a single fragment or a document\n * with multiple fragments that represent what you are reading. If you pass\n * in a document with multiple fragments then you must also specify a\n * `fragmentName`.\n *\n * @param optimistic - Set to `true` to allow `readFragment` to return\n * optimistic results. Is `false` by default.\n */\n ApolloClient.prototype.readFragment = function (options, optimistic) {\n if (optimistic === void 0) { optimistic = false; }\n return this.cache.readFragment(options, optimistic);\n };\n /**\n * Writes some data in the shape of the provided GraphQL query directly to\n * the store. This method will start at the root query. To start at a\n * specific id returned by `dataIdFromObject` then use `writeFragment`.\n */\n ApolloClient.prototype.writeQuery = function (options) {\n var ref = this.cache.writeQuery(options);\n if (options.broadcast !== false) {\n this.queryManager.broadcastQueries();\n }\n return ref;\n };\n /**\n * Writes some data in the shape of the provided GraphQL fragment directly to\n * the store. This method will write to a GraphQL fragment from any arbitrary\n * id that is currently cached, unlike `writeQuery` which will only write\n * from the root query.\n *\n * You must pass in a GraphQL document with a single fragment or a document\n * with multiple fragments that represent what you are writing. If you pass\n * in a document with multiple fragments then you must also specify a\n * `fragmentName`.\n */\n ApolloClient.prototype.writeFragment = function (options) {\n var ref = this.cache.writeFragment(options);\n if (options.broadcast !== false) {\n this.queryManager.broadcastQueries();\n }\n return ref;\n };\n ApolloClient.prototype.__actionHookForDevTools = function (cb) {\n this.devToolsHookCb = cb;\n };\n ApolloClient.prototype.__requestRaw = function (payload) {\n return execute(this.link, payload);\n };\n /**\n * Resets your entire store by clearing out your cache and then re-executing\n * all of your active queries. This makes it so that you may guarantee that\n * there is no data left in your store from a time before you called this\n * method.\n *\n * `resetStore()` is useful when your user just logged out. You’ve removed the\n * user session, and you now want to make sure that any references to data you\n * might have fetched while the user session was active is gone.\n *\n * It is important to remember that `resetStore()` *will* refetch any active\n * queries. This means that any components that might be mounted will execute\n * their queries again using your network interface. If you do not want to\n * re-execute any queries then you should make sure to stop watching any\n * active queries.\n */\n ApolloClient.prototype.resetStore = function () {\n var _this = this;\n return Promise.resolve()\n .then(function () {\n return _this.queryManager.clearStore({\n discardWatches: false,\n });\n })\n .then(function () { return Promise.all(_this.resetStoreCallbacks.map(function (fn) { return fn(); })); })\n .then(function () { return _this.reFetchObservableQueries(); });\n };\n /**\n * Remove all data from the store. Unlike `resetStore`, `clearStore` will\n * not refetch any active queries.\n */\n ApolloClient.prototype.clearStore = function () {\n var _this = this;\n return Promise.resolve()\n .then(function () {\n return _this.queryManager.clearStore({\n discardWatches: true,\n });\n })\n .then(function () { return Promise.all(_this.clearStoreCallbacks.map(function (fn) { return fn(); })); });\n };\n /**\n * Allows callbacks to be registered that are executed when the store is\n * reset. `onResetStore` returns an unsubscribe function that can be used\n * to remove registered callbacks.\n */\n ApolloClient.prototype.onResetStore = function (cb) {\n var _this = this;\n this.resetStoreCallbacks.push(cb);\n return function () {\n _this.resetStoreCallbacks = _this.resetStoreCallbacks.filter(function (c) { return c !== cb; });\n };\n };\n /**\n * Allows callbacks to be registered that are executed when the store is\n * cleared. `onClearStore` returns an unsubscribe function that can be used\n * to remove registered callbacks.\n */\n ApolloClient.prototype.onClearStore = function (cb) {\n var _this = this;\n this.clearStoreCallbacks.push(cb);\n return function () {\n _this.clearStoreCallbacks = _this.clearStoreCallbacks.filter(function (c) { return c !== cb; });\n };\n };\n /**\n * Refetches all of your active queries.\n *\n * `reFetchObservableQueries()` is useful if you want to bring the client back to proper state in case of a network outage\n *\n * It is important to remember that `reFetchObservableQueries()` *will* refetch any active\n * queries. This means that any components that might be mounted will execute\n * their queries again using your network interface. If you do not want to\n * re-execute any queries then you should make sure to stop watching any\n * active queries.\n * Takes optional parameter `includeStandby` which will include queries in standby-mode when refetching.\n */\n ApolloClient.prototype.reFetchObservableQueries = function (includeStandby) {\n return this.queryManager.reFetchObservableQueries(includeStandby);\n };\n /**\n * Refetches specified active queries. Similar to \"reFetchObservableQueries()\" but with a specific list of queries.\n *\n * `refetchQueries()` is useful for use cases to imperatively refresh a selection of queries.\n *\n * It is important to remember that `refetchQueries()` *will* refetch specified active\n * queries. This means that any components that might be mounted will execute\n * their queries again using your network interface. If you do not want to\n * re-execute any queries then you should make sure to stop watching any\n * active queries.\n */\n ApolloClient.prototype.refetchQueries = function (options) {\n var map = this.queryManager.refetchQueries(options);\n var queries = [];\n var results = [];\n map.forEach(function (result, obsQuery) {\n queries.push(obsQuery);\n results.push(result);\n });\n var result = Promise.all(results);\n // In case you need the raw results immediately, without awaiting\n // Promise.all(results):\n result.queries = queries;\n result.results = results;\n // If you decide to ignore the result Promise because you're using\n // result.queries and result.results instead, you shouldn't have to worry\n // about preventing uncaught rejections for the Promise.all result.\n result.catch(function (error) {\n globalThis.__DEV__ !== false && invariant.debug(17, error);\n });\n return result;\n };\n /**\n * Get all currently active `ObservableQuery` objects, in a `Map` keyed by\n * query ID strings.\n *\n * An \"active\" query is one that has observers and a `fetchPolicy` other than\n * \"standby\" or \"cache-only\".\n *\n * You can include all `ObservableQuery` objects (including the inactive ones)\n * by passing \"all\" instead of \"active\", or you can include just a subset of\n * active queries by passing an array of query names or DocumentNode objects.\n */\n ApolloClient.prototype.getObservableQueries = function (include) {\n if (include === void 0) { include = \"active\"; }\n return this.queryManager.getObservableQueries(include);\n };\n /**\n * Exposes the cache's complete state, in a serializable format for later restoration.\n */\n ApolloClient.prototype.extract = function (optimistic) {\n return this.cache.extract(optimistic);\n };\n /**\n * Replaces existing state in the cache (if any) with the values expressed by\n * `serializedState`.\n *\n * Called when hydrating a cache (server side rendering, or offline storage),\n * and also (potentially) during hot reloads.\n */\n ApolloClient.prototype.restore = function (serializedState) {\n return this.cache.restore(serializedState);\n };\n /**\n * Add additional local resolvers.\n */\n ApolloClient.prototype.addResolvers = function (resolvers) {\n this.localState.addResolvers(resolvers);\n };\n /**\n * Set (override existing) local resolvers.\n */\n ApolloClient.prototype.setResolvers = function (resolvers) {\n this.localState.setResolvers(resolvers);\n };\n /**\n * Get all registered local resolvers.\n */\n ApolloClient.prototype.getResolvers = function () {\n return this.localState.getResolvers();\n };\n /**\n * Set a custom local state fragment matcher.\n */\n ApolloClient.prototype.setLocalStateFragmentMatcher = function (fragmentMatcher) {\n this.localState.setFragmentMatcher(fragmentMatcher);\n };\n /**\n * Define a new ApolloLink (or link chain) that Apollo Client will use.\n */\n ApolloClient.prototype.setLink = function (newLink) {\n this.link = this.queryManager.link = newLink;\n };\n Object.defineProperty(ApolloClient.prototype, \"defaultContext\", {\n get: function () {\n return this.queryManager.defaultContext;\n },\n enumerable: false,\n configurable: true\n });\n return ApolloClient;\n}());\nexport { ApolloClient };\nif (globalThis.__DEV__ !== false) {\n ApolloClient.prototype.getMemoryInternals = getApolloClientMemoryInternals;\n}\n//# sourceMappingURL=ApolloClient.js.map","import { __assign, __rest } from \"tslib\";\nimport { wrap } from \"optimism\";\nimport { Observable, cacheSizes, getFragmentQueryDocument, mergeDeepArray, } from \"../../utilities/index.js\";\nimport { WeakCache } from \"@wry/caches\";\nimport { getApolloCacheMemoryInternals } from \"../../utilities/caching/getMemoryInternals.js\";\nimport { equalByQuery } from \"../../core/equalByQuery.js\";\nvar ApolloCache = /** @class */ (function () {\n function ApolloCache() {\n this.assumeImmutableResults = false;\n // Make sure we compute the same (===) fragment query document every\n // time we receive the same fragment in readFragment.\n this.getFragmentDoc = wrap(getFragmentQueryDocument, {\n max: cacheSizes[\"cache.fragmentQueryDocuments\"] ||\n 1000 /* defaultCacheSizes[\"cache.fragmentQueryDocuments\"] */,\n cache: WeakCache,\n });\n }\n // Transactional API\n // The batch method is intended to replace/subsume both performTransaction\n // and recordOptimisticTransaction, but performTransaction came first, so we\n // provide a default batch implementation that's just another way of calling\n // performTransaction. Subclasses of ApolloCache (such as InMemoryCache) can\n // override the batch method to do more interesting things with its options.\n ApolloCache.prototype.batch = function (options) {\n var _this = this;\n var optimisticId = typeof options.optimistic === \"string\" ? options.optimistic\n : options.optimistic === false ? null\n : void 0;\n var updateResult;\n this.performTransaction(function () { return (updateResult = options.update(_this)); }, optimisticId);\n return updateResult;\n };\n ApolloCache.prototype.recordOptimisticTransaction = function (transaction, optimisticId) {\n this.performTransaction(transaction, optimisticId);\n };\n // Optional API\n // Called once per input document, allowing the cache to make static changes\n // to the query, such as adding __typename fields.\n ApolloCache.prototype.transformDocument = function (document) {\n return document;\n };\n // Called before each ApolloLink request, allowing the cache to make dynamic\n // changes to the query, such as filling in missing fragment definitions.\n ApolloCache.prototype.transformForLink = function (document) {\n return document;\n };\n ApolloCache.prototype.identify = function (object) {\n return;\n };\n ApolloCache.prototype.gc = function () {\n return [];\n };\n ApolloCache.prototype.modify = function (options) {\n return false;\n };\n // DataProxy API\n ApolloCache.prototype.readQuery = function (options, optimistic) {\n if (optimistic === void 0) { optimistic = !!options.optimistic; }\n return this.read(__assign(__assign({}, options), { rootId: options.id || \"ROOT_QUERY\", optimistic: optimistic }));\n };\n /** {@inheritDoc @apollo/client!ApolloClient#watchFragment:member(1)} */\n ApolloCache.prototype.watchFragment = function (options) {\n var _this = this;\n var fragment = options.fragment, fragmentName = options.fragmentName, from = options.from, _a = options.optimistic, optimistic = _a === void 0 ? true : _a, otherOptions = __rest(options, [\"fragment\", \"fragmentName\", \"from\", \"optimistic\"]);\n var query = this.getFragmentDoc(fragment, fragmentName);\n var diffOptions = __assign(__assign({}, otherOptions), { returnPartialData: true, id: \n // While our TypeScript types do not allow for `undefined` as a valid\n // `from`, its possible `useFragment` gives us an `undefined` since it\n // calls` cache.identify` and provides that value to `from`. We are\n // adding this fix here however to ensure those using plain JavaScript\n // and using `cache.identify` themselves will avoid seeing the obscure\n // warning.\n typeof from === \"undefined\" || typeof from === \"string\" ?\n from\n : this.identify(from), query: query, optimistic: optimistic });\n var latestDiff;\n return new Observable(function (observer) {\n return _this.watch(__assign(__assign({}, diffOptions), { immediate: true, callback: function (diff) {\n if (\n // Always ensure we deliver the first result\n latestDiff &&\n equalByQuery(query, { data: latestDiff === null || latestDiff === void 0 ? void 0 : latestDiff.result }, { data: diff.result })) {\n return;\n }\n var result = {\n data: diff.result,\n complete: !!diff.complete,\n };\n if (diff.missing) {\n result.missing = mergeDeepArray(diff.missing.map(function (error) { return error.missing; }));\n }\n latestDiff = diff;\n observer.next(result);\n } }));\n });\n };\n ApolloCache.prototype.readFragment = function (options, optimistic) {\n if (optimistic === void 0) { optimistic = !!options.optimistic; }\n return this.read(__assign(__assign({}, options), { query: this.getFragmentDoc(options.fragment, options.fragmentName), rootId: options.id, optimistic: optimistic }));\n };\n ApolloCache.prototype.writeQuery = function (_a) {\n var id = _a.id, data = _a.data, options = __rest(_a, [\"id\", \"data\"]);\n return this.write(Object.assign(options, {\n dataId: id || \"ROOT_QUERY\",\n result: data,\n }));\n };\n ApolloCache.prototype.writeFragment = function (_a) {\n var id = _a.id, data = _a.data, fragment = _a.fragment, fragmentName = _a.fragmentName, options = __rest(_a, [\"id\", \"data\", \"fragment\", \"fragmentName\"]);\n return this.write(Object.assign(options, {\n query: this.getFragmentDoc(fragment, fragmentName),\n dataId: id,\n result: data,\n }));\n };\n ApolloCache.prototype.updateQuery = function (options, update) {\n return this.batch({\n update: function (cache) {\n var value = cache.readQuery(options);\n var data = update(value);\n if (data === void 0 || data === null)\n return value;\n cache.writeQuery(__assign(__assign({}, options), { data: data }));\n return data;\n },\n });\n };\n ApolloCache.prototype.updateFragment = function (options, update) {\n return this.batch({\n update: function (cache) {\n var value = cache.readFragment(options);\n var data = update(value);\n if (data === void 0 || data === null)\n return value;\n cache.writeFragment(__assign(__assign({}, options), { data: data }));\n return data;\n },\n });\n };\n return ApolloCache;\n}());\nexport { ApolloCache };\nif (globalThis.__DEV__ !== false) {\n ApolloCache.prototype.getMemoryInternals = getApolloCacheMemoryInternals;\n}\n//# sourceMappingURL=cache.js.map","import { __extends } from \"tslib\";\nvar MissingFieldError = /** @class */ (function (_super) {\n __extends(MissingFieldError, _super);\n function MissingFieldError(message, path, query, variables) {\n var _a;\n // 'Error' breaks prototype chain here\n var _this = _super.call(this, message) || this;\n _this.message = message;\n _this.path = path;\n _this.query = query;\n _this.variables = variables;\n if (Array.isArray(_this.path)) {\n _this.missing = _this.message;\n for (var i = _this.path.length - 1; i >= 0; --i) {\n _this.missing = (_a = {}, _a[_this.path[i]] = _this.missing, _a);\n }\n }\n else {\n _this.missing = _this.path;\n }\n // We're not using `Object.setPrototypeOf` here as it isn't fully supported\n // on Android (see issue #3236).\n _this.__proto__ = MissingFieldError.prototype;\n return _this;\n }\n return MissingFieldError;\n}(Error));\nexport { MissingFieldError };\n//# sourceMappingURL=common.js.map","import { isReference, isField, DeepMerger, resultKeyNameFromField, shouldInclude, isNonNullObject, compact, createFragmentMap, getFragmentDefinitions, isArray, } from \"../../utilities/index.js\";\nexport var hasOwn = Object.prototype.hasOwnProperty;\nexport function isNullish(value) {\n return value === null || value === void 0;\n}\nexport { isArray };\nexport function defaultDataIdFromObject(_a, context) {\n var __typename = _a.__typename, id = _a.id, _id = _a._id;\n if (typeof __typename === \"string\") {\n if (context) {\n context.keyObject =\n !isNullish(id) ? { id: id }\n : !isNullish(_id) ? { _id: _id }\n : void 0;\n }\n // If there is no object.id, fall back to object._id.\n if (isNullish(id) && !isNullish(_id)) {\n id = _id;\n }\n if (!isNullish(id)) {\n return \"\".concat(__typename, \":\").concat(typeof id === \"number\" || typeof id === \"string\" ?\n id\n : JSON.stringify(id));\n }\n }\n}\nvar defaultConfig = {\n dataIdFromObject: defaultDataIdFromObject,\n addTypename: true,\n resultCaching: true,\n // Thanks to the shouldCanonizeResults helper, this should be the only line\n // you have to change to reenable canonization by default in the future.\n canonizeResults: false,\n};\nexport function normalizeConfig(config) {\n return compact(defaultConfig, config);\n}\nexport function shouldCanonizeResults(config) {\n var value = config.canonizeResults;\n return value === void 0 ? defaultConfig.canonizeResults : value;\n}\nexport function getTypenameFromStoreObject(store, objectOrReference) {\n return isReference(objectOrReference) ?\n store.get(objectOrReference.__ref, \"__typename\")\n : objectOrReference && objectOrReference.__typename;\n}\nexport var TypeOrFieldNameRegExp = /^[_a-z][_0-9a-z]*/i;\nexport function fieldNameFromStoreName(storeFieldName) {\n var match = storeFieldName.match(TypeOrFieldNameRegExp);\n return match ? match[0] : storeFieldName;\n}\nexport function selectionSetMatchesResult(selectionSet, result, variables) {\n if (isNonNullObject(result)) {\n return isArray(result) ?\n result.every(function (item) {\n return selectionSetMatchesResult(selectionSet, item, variables);\n })\n : selectionSet.selections.every(function (field) {\n if (isField(field) && shouldInclude(field, variables)) {\n var key = resultKeyNameFromField(field);\n return (hasOwn.call(result, key) &&\n (!field.selectionSet ||\n selectionSetMatchesResult(field.selectionSet, result[key], variables)));\n }\n // If the selection has been skipped with @skip(true) or\n // @include(false), it should not count against the matching. If\n // the selection is not a field, it must be a fragment (inline or\n // named). We will determine if selectionSetMatchesResult for that\n // fragment when we get to it, so for now we return true.\n return true;\n });\n }\n return false;\n}\nexport function storeValueIsStoreObject(value) {\n return isNonNullObject(value) && !isReference(value) && !isArray(value);\n}\nexport function makeProcessedFieldsMerger() {\n return new DeepMerger();\n}\nexport function extractFragmentContext(document, fragments) {\n // FragmentMap consisting only of fragments defined directly in document, not\n // including other fragments registered in the FragmentRegistry.\n var fragmentMap = createFragmentMap(getFragmentDefinitions(document));\n return {\n fragmentMap: fragmentMap,\n lookupFragment: function (name) {\n var def = fragmentMap[name];\n if (!def && fragments) {\n def = fragments.lookup(name);\n }\n return def || null;\n },\n };\n}\n//# sourceMappingURL=helpers.js.map","import { __assign, __extends, __rest } from \"tslib\";\nimport { invariant } from \"../../utilities/globals/index.js\";\nimport { dep } from \"optimism\";\nimport { equal } from \"@wry/equality\";\nimport { Trie } from \"@wry/trie\";\nimport { isReference, makeReference, DeepMerger, maybeDeepFreeze, canUseWeakMap, isNonNullObject, } from \"../../utilities/index.js\";\nimport { hasOwn, fieldNameFromStoreName } from \"./helpers.js\";\nvar DELETE = Object.create(null);\nvar delModifier = function () { return DELETE; };\nvar INVALIDATE = Object.create(null);\nvar EntityStore = /** @class */ (function () {\n function EntityStore(policies, group) {\n var _this = this;\n this.policies = policies;\n this.group = group;\n this.data = Object.create(null);\n // Maps root entity IDs to the number of times they have been retained, minus\n // the number of times they have been released. Retained entities keep other\n // entities they reference (even indirectly) from being garbage collected.\n this.rootIds = Object.create(null);\n // Lazily tracks { __ref: } strings contained by this.data[dataId].\n this.refs = Object.create(null);\n // Bound function that can be passed around to provide easy access to fields\n // of Reference objects as well as ordinary objects.\n this.getFieldValue = function (objectOrReference, storeFieldName) {\n return maybeDeepFreeze(isReference(objectOrReference) ?\n _this.get(objectOrReference.__ref, storeFieldName)\n : objectOrReference && objectOrReference[storeFieldName]);\n };\n // Returns true for non-normalized StoreObjects and non-dangling\n // References, indicating that readField(name, objOrRef) has a chance of\n // working. Useful for filtering out dangling references from lists.\n this.canRead = function (objOrRef) {\n return isReference(objOrRef) ?\n _this.has(objOrRef.__ref)\n : typeof objOrRef === \"object\";\n };\n // Bound function that converts an id or an object with a __typename and\n // primary key fields to a Reference object. If called with a Reference object,\n // that same Reference object is returned. Pass true for mergeIntoStore to persist\n // an object into the store.\n this.toReference = function (objOrIdOrRef, mergeIntoStore) {\n if (typeof objOrIdOrRef === \"string\") {\n return makeReference(objOrIdOrRef);\n }\n if (isReference(objOrIdOrRef)) {\n return objOrIdOrRef;\n }\n var id = _this.policies.identify(objOrIdOrRef)[0];\n if (id) {\n var ref = makeReference(id);\n if (mergeIntoStore) {\n _this.merge(id, objOrIdOrRef);\n }\n return ref;\n }\n };\n }\n // Although the EntityStore class is abstract, it contains concrete\n // implementations of the various NormalizedCache interface methods that\n // are inherited by the Root and Layer subclasses.\n EntityStore.prototype.toObject = function () {\n return __assign({}, this.data);\n };\n EntityStore.prototype.has = function (dataId) {\n return this.lookup(dataId, true) !== void 0;\n };\n EntityStore.prototype.get = function (dataId, fieldName) {\n this.group.depend(dataId, fieldName);\n if (hasOwn.call(this.data, dataId)) {\n var storeObject = this.data[dataId];\n if (storeObject && hasOwn.call(storeObject, fieldName)) {\n return storeObject[fieldName];\n }\n }\n if (fieldName === \"__typename\" &&\n hasOwn.call(this.policies.rootTypenamesById, dataId)) {\n return this.policies.rootTypenamesById[dataId];\n }\n if (this instanceof Layer) {\n return this.parent.get(dataId, fieldName);\n }\n };\n EntityStore.prototype.lookup = function (dataId, dependOnExistence) {\n // The has method (above) calls lookup with dependOnExistence = true, so\n // that it can later be invalidated when we add or remove a StoreObject for\n // this dataId. Any consumer who cares about the contents of the StoreObject\n // should not rely on this dependency, since the contents could change\n // without the object being added or removed.\n if (dependOnExistence)\n this.group.depend(dataId, \"__exists\");\n if (hasOwn.call(this.data, dataId)) {\n return this.data[dataId];\n }\n if (this instanceof Layer) {\n return this.parent.lookup(dataId, dependOnExistence);\n }\n if (this.policies.rootTypenamesById[dataId]) {\n return Object.create(null);\n }\n };\n EntityStore.prototype.merge = function (older, newer) {\n var _this = this;\n var dataId;\n // Convert unexpected references to ID strings.\n if (isReference(older))\n older = older.__ref;\n if (isReference(newer))\n newer = newer.__ref;\n var existing = typeof older === \"string\" ? this.lookup((dataId = older)) : older;\n var incoming = typeof newer === \"string\" ? this.lookup((dataId = newer)) : newer;\n // If newer was a string ID, but that ID was not defined in this store,\n // then there are no fields to be merged, so we're done.\n if (!incoming)\n return;\n invariant(typeof dataId === \"string\", 1);\n var merged = new DeepMerger(storeObjectReconciler).merge(existing, incoming);\n // Even if merged === existing, existing may have come from a lower\n // layer, so we always need to set this.data[dataId] on this level.\n this.data[dataId] = merged;\n if (merged !== existing) {\n delete this.refs[dataId];\n if (this.group.caching) {\n var fieldsToDirty_1 = Object.create(null);\n // If we added a new StoreObject where there was previously none, dirty\n // anything that depended on the existence of this dataId, such as the\n // EntityStore#has method.\n if (!existing)\n fieldsToDirty_1.__exists = 1;\n // Now invalidate dependents who called getFieldValue for any fields\n // that are changing as a result of this merge.\n Object.keys(incoming).forEach(function (storeFieldName) {\n if (!existing ||\n existing[storeFieldName] !== merged[storeFieldName]) {\n // Always dirty the full storeFieldName, which may include\n // serialized arguments following the fieldName prefix.\n fieldsToDirty_1[storeFieldName] = 1;\n // Also dirty fieldNameFromStoreName(storeFieldName) if it's\n // different from storeFieldName and this field does not have\n // keyArgs configured, because that means the cache can't make\n // any assumptions about how field values with the same field\n // name but different arguments might be interrelated, so it\n // must err on the side of invalidating all field values that\n // share the same short fieldName, regardless of arguments.\n var fieldName = fieldNameFromStoreName(storeFieldName);\n if (fieldName !== storeFieldName &&\n !_this.policies.hasKeyArgs(merged.__typename, fieldName)) {\n fieldsToDirty_1[fieldName] = 1;\n }\n // If merged[storeFieldName] has become undefined, and this is the\n // Root layer, actually delete the property from the merged object,\n // which is guaranteed to have been created fresh in this method.\n if (merged[storeFieldName] === void 0 && !(_this instanceof Layer)) {\n delete merged[storeFieldName];\n }\n }\n });\n if (fieldsToDirty_1.__typename &&\n !(existing && existing.__typename) &&\n // Since we return default root __typename strings\n // automatically from store.get, we don't need to dirty the\n // ROOT_QUERY.__typename field if merged.__typename is equal\n // to the default string (usually \"Query\").\n this.policies.rootTypenamesById[dataId] === merged.__typename) {\n delete fieldsToDirty_1.__typename;\n }\n Object.keys(fieldsToDirty_1).forEach(function (fieldName) {\n return _this.group.dirty(dataId, fieldName);\n });\n }\n }\n };\n EntityStore.prototype.modify = function (dataId, fields) {\n var _this = this;\n var storeObject = this.lookup(dataId);\n if (storeObject) {\n var changedFields_1 = Object.create(null);\n var needToMerge_1 = false;\n var allDeleted_1 = true;\n var sharedDetails_1 = {\n DELETE: DELETE,\n INVALIDATE: INVALIDATE,\n isReference: isReference,\n toReference: this.toReference,\n canRead: this.canRead,\n readField: function (fieldNameOrOptions, from) {\n return _this.policies.readField(typeof fieldNameOrOptions === \"string\" ?\n {\n fieldName: fieldNameOrOptions,\n from: from || makeReference(dataId),\n }\n : fieldNameOrOptions, { store: _this });\n },\n };\n Object.keys(storeObject).forEach(function (storeFieldName) {\n var fieldName = fieldNameFromStoreName(storeFieldName);\n var fieldValue = storeObject[storeFieldName];\n if (fieldValue === void 0)\n return;\n var modify = typeof fields === \"function\" ? fields : (fields[storeFieldName] || fields[fieldName]);\n if (modify) {\n var newValue = modify === delModifier ? DELETE : (modify(maybeDeepFreeze(fieldValue), __assign(__assign({}, sharedDetails_1), { fieldName: fieldName, storeFieldName: storeFieldName, storage: _this.getStorage(dataId, storeFieldName) })));\n if (newValue === INVALIDATE) {\n _this.group.dirty(dataId, storeFieldName);\n }\n else {\n if (newValue === DELETE)\n newValue = void 0;\n if (newValue !== fieldValue) {\n changedFields_1[storeFieldName] = newValue;\n needToMerge_1 = true;\n fieldValue = newValue;\n if (globalThis.__DEV__ !== false) {\n var checkReference = function (ref) {\n if (_this.lookup(ref.__ref) === undefined) {\n globalThis.__DEV__ !== false && invariant.warn(2, ref);\n return true;\n }\n };\n if (isReference(newValue)) {\n checkReference(newValue);\n }\n else if (Array.isArray(newValue)) {\n // Warn about writing \"mixed\" arrays of Reference and non-Reference objects\n var seenReference = false;\n var someNonReference = void 0;\n for (var _i = 0, newValue_1 = newValue; _i < newValue_1.length; _i++) {\n var value = newValue_1[_i];\n if (isReference(value)) {\n seenReference = true;\n if (checkReference(value))\n break;\n }\n else {\n // Do not warn on primitive values, since those could never be represented\n // by a reference. This is a valid (albeit uncommon) use case.\n if (typeof value === \"object\" && !!value) {\n var id = _this.policies.identify(value)[0];\n // check if object could even be referenced, otherwise we are not interested in it for this warning\n if (id) {\n someNonReference = value;\n }\n }\n }\n if (seenReference && someNonReference !== undefined) {\n globalThis.__DEV__ !== false && invariant.warn(3, someNonReference);\n break;\n }\n }\n }\n }\n }\n }\n }\n if (fieldValue !== void 0) {\n allDeleted_1 = false;\n }\n });\n if (needToMerge_1) {\n this.merge(dataId, changedFields_1);\n if (allDeleted_1) {\n if (this instanceof Layer) {\n this.data[dataId] = void 0;\n }\n else {\n delete this.data[dataId];\n }\n this.group.dirty(dataId, \"__exists\");\n }\n return true;\n }\n }\n return false;\n };\n // If called with only one argument, removes the entire entity\n // identified by dataId. If called with a fieldName as well, removes all\n // fields of that entity whose names match fieldName according to the\n // fieldNameFromStoreName helper function. If called with a fieldName\n // and variables, removes all fields of that entity whose names match fieldName\n // and whose arguments when cached exactly match the variables passed.\n EntityStore.prototype.delete = function (dataId, fieldName, args) {\n var _a;\n var storeObject = this.lookup(dataId);\n if (storeObject) {\n var typename = this.getFieldValue(storeObject, \"__typename\");\n var storeFieldName = fieldName && args ?\n this.policies.getStoreFieldName({ typename: typename, fieldName: fieldName, args: args })\n : fieldName;\n return this.modify(dataId, storeFieldName ? (_a = {},\n _a[storeFieldName] = delModifier,\n _a) : delModifier);\n }\n return false;\n };\n EntityStore.prototype.evict = function (options, limit) {\n var evicted = false;\n if (options.id) {\n if (hasOwn.call(this.data, options.id)) {\n evicted = this.delete(options.id, options.fieldName, options.args);\n }\n if (this instanceof Layer && this !== limit) {\n evicted = this.parent.evict(options, limit) || evicted;\n }\n // Always invalidate the field to trigger rereading of watched\n // queries, even if no cache data was modified by the eviction,\n // because queries may depend on computed fields with custom read\n // functions, whose values are not stored in the EntityStore.\n if (options.fieldName || evicted) {\n this.group.dirty(options.id, options.fieldName || \"__exists\");\n }\n }\n return evicted;\n };\n EntityStore.prototype.clear = function () {\n this.replace(null);\n };\n EntityStore.prototype.extract = function () {\n var _this = this;\n var obj = this.toObject();\n var extraRootIds = [];\n this.getRootIdSet().forEach(function (id) {\n if (!hasOwn.call(_this.policies.rootTypenamesById, id)) {\n extraRootIds.push(id);\n }\n });\n if (extraRootIds.length) {\n obj.__META = { extraRootIds: extraRootIds.sort() };\n }\n return obj;\n };\n EntityStore.prototype.replace = function (newData) {\n var _this = this;\n Object.keys(this.data).forEach(function (dataId) {\n if (!(newData && hasOwn.call(newData, dataId))) {\n _this.delete(dataId);\n }\n });\n if (newData) {\n var __META = newData.__META, rest_1 = __rest(newData, [\"__META\"]);\n Object.keys(rest_1).forEach(function (dataId) {\n _this.merge(dataId, rest_1[dataId]);\n });\n if (__META) {\n __META.extraRootIds.forEach(this.retain, this);\n }\n }\n };\n EntityStore.prototype.retain = function (rootId) {\n return (this.rootIds[rootId] = (this.rootIds[rootId] || 0) + 1);\n };\n EntityStore.prototype.release = function (rootId) {\n if (this.rootIds[rootId] > 0) {\n var count = --this.rootIds[rootId];\n if (!count)\n delete this.rootIds[rootId];\n return count;\n }\n return 0;\n };\n // Return a Set of all the ID strings that have been retained by\n // this layer/root *and* any layers/roots beneath it.\n EntityStore.prototype.getRootIdSet = function (ids) {\n if (ids === void 0) { ids = new Set(); }\n Object.keys(this.rootIds).forEach(ids.add, ids);\n if (this instanceof Layer) {\n this.parent.getRootIdSet(ids);\n }\n else {\n // Official singleton IDs like ROOT_QUERY and ROOT_MUTATION are\n // always considered roots for garbage collection, regardless of\n // their retainment counts in this.rootIds.\n Object.keys(this.policies.rootTypenamesById).forEach(ids.add, ids);\n }\n return ids;\n };\n // The goal of garbage collection is to remove IDs from the Root layer of the\n // store that are no longer reachable starting from any IDs that have been\n // explicitly retained (see retain and release, above). Returns an array of\n // dataId strings that were removed from the store.\n EntityStore.prototype.gc = function () {\n var _this = this;\n var ids = this.getRootIdSet();\n var snapshot = this.toObject();\n ids.forEach(function (id) {\n if (hasOwn.call(snapshot, id)) {\n // Because we are iterating over an ECMAScript Set, the IDs we add here\n // will be visited in later iterations of the forEach loop only if they\n // were not previously contained by the Set.\n Object.keys(_this.findChildRefIds(id)).forEach(ids.add, ids);\n // By removing IDs from the snapshot object here, we protect them from\n // getting removed from the root store layer below.\n delete snapshot[id];\n }\n });\n var idsToRemove = Object.keys(snapshot);\n if (idsToRemove.length) {\n var root_1 = this;\n while (root_1 instanceof Layer)\n root_1 = root_1.parent;\n idsToRemove.forEach(function (id) { return root_1.delete(id); });\n }\n return idsToRemove;\n };\n EntityStore.prototype.findChildRefIds = function (dataId) {\n if (!hasOwn.call(this.refs, dataId)) {\n var found_1 = (this.refs[dataId] = Object.create(null));\n var root = this.data[dataId];\n if (!root)\n return found_1;\n var workSet_1 = new Set([root]);\n // Within the store, only arrays and objects can contain child entity\n // references, so we can prune the traversal using this predicate:\n workSet_1.forEach(function (obj) {\n if (isReference(obj)) {\n found_1[obj.__ref] = true;\n // In rare cases, a { __ref } Reference object may have other fields.\n // This often indicates a mismerging of References with StoreObjects,\n // but garbage collection should not be fooled by a stray __ref\n // property in a StoreObject (ignoring all the other fields just\n // because the StoreObject looks like a Reference). To avoid this\n // premature termination of findChildRefIds recursion, we fall through\n // to the code below, which will handle any other properties of obj.\n }\n if (isNonNullObject(obj)) {\n Object.keys(obj).forEach(function (key) {\n var child = obj[key];\n // No need to add primitive values to the workSet, since they cannot\n // contain reference objects.\n if (isNonNullObject(child)) {\n workSet_1.add(child);\n }\n });\n }\n });\n }\n return this.refs[dataId];\n };\n EntityStore.prototype.makeCacheKey = function () {\n return this.group.keyMaker.lookupArray(arguments);\n };\n return EntityStore;\n}());\nexport { EntityStore };\n// A single CacheGroup represents a set of one or more EntityStore objects,\n// typically the Root store in a CacheGroup by itself, and all active Layer\n// stores in a group together. A single EntityStore object belongs to only\n// one CacheGroup, store.group. The CacheGroup is responsible for tracking\n// dependencies, so store.group is helpful for generating unique keys for\n// cached results that need to be invalidated when/if those dependencies\n// change. If we used the EntityStore objects themselves as cache keys (that\n// is, store rather than store.group), the cache would become unnecessarily\n// fragmented by all the different Layer objects. Instead, the CacheGroup\n// approach allows all optimistic Layer objects in the same linked list to\n// belong to one CacheGroup, with the non-optimistic Root object belonging\n// to another CacheGroup, allowing resultCaching dependencies to be tracked\n// separately for optimistic and non-optimistic entity data.\nvar CacheGroup = /** @class */ (function () {\n function CacheGroup(caching, parent) {\n if (parent === void 0) { parent = null; }\n this.caching = caching;\n this.parent = parent;\n this.d = null;\n this.resetCaching();\n }\n CacheGroup.prototype.resetCaching = function () {\n this.d = this.caching ? dep() : null;\n this.keyMaker = new Trie(canUseWeakMap);\n };\n CacheGroup.prototype.depend = function (dataId, storeFieldName) {\n if (this.d) {\n this.d(makeDepKey(dataId, storeFieldName));\n var fieldName = fieldNameFromStoreName(storeFieldName);\n if (fieldName !== storeFieldName) {\n // Fields with arguments that contribute extra identifying\n // information to the fieldName (thus forming the storeFieldName)\n // depend not only on the full storeFieldName but also on the\n // short fieldName, so the field can be invalidated using either\n // level of specificity.\n this.d(makeDepKey(dataId, fieldName));\n }\n if (this.parent) {\n this.parent.depend(dataId, storeFieldName);\n }\n }\n };\n CacheGroup.prototype.dirty = function (dataId, storeFieldName) {\n if (this.d) {\n this.d.dirty(makeDepKey(dataId, storeFieldName), \n // When storeFieldName === \"__exists\", that means the entity identified\n // by dataId has either disappeared from the cache or was newly added,\n // so the result caching system would do well to \"forget everything it\n // knows\" about that object. To achieve that kind of invalidation, we\n // not only dirty the associated result cache entry, but also remove it\n // completely from the dependency graph. For the optimism implementation\n // details, see https://github.com/benjamn/optimism/pull/195.\n storeFieldName === \"__exists\" ? \"forget\" : \"setDirty\");\n }\n };\n return CacheGroup;\n}());\nfunction makeDepKey(dataId, storeFieldName) {\n // Since field names cannot have '#' characters in them, this method\n // of joining the field name and the ID should be unambiguous, and much\n // cheaper than JSON.stringify([dataId, fieldName]).\n return storeFieldName + \"#\" + dataId;\n}\nexport function maybeDependOnExistenceOfEntity(store, entityId) {\n if (supportsResultCaching(store)) {\n // We use this pseudo-field __exists elsewhere in the EntityStore code to\n // represent changes in the existence of the entity object identified by\n // entityId. This dependency gets reliably dirtied whenever an object with\n // this ID is deleted (or newly created) within this group, so any result\n // cache entries (for example, StoreReader#executeSelectionSet results) that\n // depend on __exists for this entityId will get dirtied as well, leading to\n // the eventual recomputation (instead of reuse) of those result objects the\n // next time someone reads them from the cache.\n store.group.depend(entityId, \"__exists\");\n }\n}\n(function (EntityStore) {\n // Refer to this class as EntityStore.Root outside this namespace.\n var Root = /** @class */ (function (_super) {\n __extends(Root, _super);\n function Root(_a) {\n var policies = _a.policies, _b = _a.resultCaching, resultCaching = _b === void 0 ? true : _b, seed = _a.seed;\n var _this = _super.call(this, policies, new CacheGroup(resultCaching)) || this;\n _this.stump = new Stump(_this);\n _this.storageTrie = new Trie(canUseWeakMap);\n if (seed)\n _this.replace(seed);\n return _this;\n }\n Root.prototype.addLayer = function (layerId, replay) {\n // Adding an optimistic Layer on top of the Root actually adds the Layer\n // on top of the Stump, so the Stump always comes between the Root and\n // any Layer objects that we've added.\n return this.stump.addLayer(layerId, replay);\n };\n Root.prototype.removeLayer = function () {\n // Never remove the root layer.\n return this;\n };\n Root.prototype.getStorage = function () {\n return this.storageTrie.lookupArray(arguments);\n };\n return Root;\n }(EntityStore));\n EntityStore.Root = Root;\n})(EntityStore || (EntityStore = {}));\n// Not exported, since all Layer instances are created by the addLayer method\n// of the EntityStore.Root class.\nvar Layer = /** @class */ (function (_super) {\n __extends(Layer, _super);\n function Layer(id, parent, replay, group) {\n var _this = _super.call(this, parent.policies, group) || this;\n _this.id = id;\n _this.parent = parent;\n _this.replay = replay;\n _this.group = group;\n replay(_this);\n return _this;\n }\n Layer.prototype.addLayer = function (layerId, replay) {\n return new Layer(layerId, this, replay, this.group);\n };\n Layer.prototype.removeLayer = function (layerId) {\n var _this = this;\n // Remove all instances of the given id, not just the first one.\n var parent = this.parent.removeLayer(layerId);\n if (layerId === this.id) {\n if (this.group.caching) {\n // Dirty every ID we're removing. Technically we might be able to avoid\n // dirtying fields that have values in higher layers, but we don't have\n // easy access to higher layers here, and we're about to recreate those\n // layers anyway (see parent.addLayer below).\n Object.keys(this.data).forEach(function (dataId) {\n var ownStoreObject = _this.data[dataId];\n var parentStoreObject = parent[\"lookup\"](dataId);\n if (!parentStoreObject) {\n // The StoreObject identified by dataId was defined in this layer\n // but will be undefined in the parent layer, so we can delete the\n // whole entity using this.delete(dataId). Since we're about to\n // throw this layer away, the only goal of this deletion is to dirty\n // the removed fields.\n _this.delete(dataId);\n }\n else if (!ownStoreObject) {\n // This layer had an entry for dataId but it was undefined, which\n // means the entity was deleted in this layer, and it's about to\n // become undeleted when we remove this layer, so we need to dirty\n // all fields that are about to be reexposed.\n _this.group.dirty(dataId, \"__exists\");\n Object.keys(parentStoreObject).forEach(function (storeFieldName) {\n _this.group.dirty(dataId, storeFieldName);\n });\n }\n else if (ownStoreObject !== parentStoreObject) {\n // If ownStoreObject is not exactly the same as parentStoreObject,\n // dirty any fields whose values will change as a result of this\n // removal.\n Object.keys(ownStoreObject).forEach(function (storeFieldName) {\n if (!equal(ownStoreObject[storeFieldName], parentStoreObject[storeFieldName])) {\n _this.group.dirty(dataId, storeFieldName);\n }\n });\n }\n });\n }\n return parent;\n }\n // No changes are necessary if the parent chain remains identical.\n if (parent === this.parent)\n return this;\n // Recreate this layer on top of the new parent.\n return parent.addLayer(this.id, this.replay);\n };\n Layer.prototype.toObject = function () {\n return __assign(__assign({}, this.parent.toObject()), this.data);\n };\n Layer.prototype.findChildRefIds = function (dataId) {\n var fromParent = this.parent.findChildRefIds(dataId);\n return hasOwn.call(this.data, dataId) ? __assign(__assign({}, fromParent), _super.prototype.findChildRefIds.call(this, dataId)) : fromParent;\n };\n Layer.prototype.getStorage = function () {\n var p = this.parent;\n while (p.parent)\n p = p.parent;\n return p.getStorage.apply(p, \n // @ts-expect-error\n arguments);\n };\n return Layer;\n}(EntityStore));\n// Represents a Layer permanently installed just above the Root, which allows\n// reading optimistically (and registering optimistic dependencies) even when\n// no optimistic layers are currently active. The stump.group CacheGroup object\n// is shared by any/all Layer objects added on top of the Stump.\nvar Stump = /** @class */ (function (_super) {\n __extends(Stump, _super);\n function Stump(root) {\n return _super.call(this, \"EntityStore.Stump\", root, function () { }, new CacheGroup(root.group.caching, root.group)) || this;\n }\n Stump.prototype.removeLayer = function () {\n // Never remove the Stump layer.\n return this;\n };\n Stump.prototype.merge = function (older, newer) {\n // We never want to write any data into the Stump, so we forward any merge\n // calls to the Root instead. Another option here would be to throw an\n // exception, but the toReference(object, true) function can sometimes\n // trigger Stump writes (which used to be Root writes, before the Stump\n // concept was introduced).\n return this.parent.merge(older, newer);\n };\n return Stump;\n}(Layer));\nfunction storeObjectReconciler(existingObject, incomingObject, property) {\n var existingValue = existingObject[property];\n var incomingValue = incomingObject[property];\n // Wherever there is a key collision, prefer the incoming value, unless\n // it is deeply equal to the existing value. It's worth checking deep\n // equality here (even though blindly returning incoming would be\n // logically correct) because preserving the referential identity of\n // existing data can prevent needless rereading and rerendering.\n return equal(existingValue, incomingValue) ? existingValue : incomingValue;\n}\nexport function supportsResultCaching(store) {\n // When result caching is disabled, store.depend will be null.\n return !!(store instanceof EntityStore && store.group.caching);\n}\n//# sourceMappingURL=entityStore.js.map","import { __assign } from \"tslib\";\nimport { Trie } from \"@wry/trie\";\nimport { canUseWeakMap, canUseWeakSet, isNonNullObject as isObjectOrArray, } from \"../../utilities/index.js\";\nimport { isArray } from \"./helpers.js\";\nfunction shallowCopy(value) {\n if (isObjectOrArray(value)) {\n return isArray(value) ?\n value.slice(0)\n : __assign({ __proto__: Object.getPrototypeOf(value) }, value);\n }\n return value;\n}\n// When programmers talk about the \"canonical form\" of an object, they\n// usually have the following meaning in mind, which I've copied from\n// https://en.wiktionary.org/wiki/canonical_form:\n//\n// 1. A standard or normal presentation of a mathematical entity [or\n// object]. A canonical form is an element of a set of representatives\n// of equivalence classes of forms such that there is a function or\n// procedure which projects every element of each equivalence class\n// onto that one element, the canonical form of that equivalence\n// class. The canonical form is expected to be simpler than the rest of\n// the forms in some way.\n//\n// That's a long-winded way of saying any two objects that have the same\n// canonical form may be considered equivalent, even if they are !==,\n// which usually means the objects are structurally equivalent (deeply\n// equal), but don't necessarily use the same memory.\n//\n// Like a literary or musical canon, this ObjectCanon class represents a\n// collection of unique canonical items (JavaScript objects), with the\n// important property that canon.admit(a) === canon.admit(b) if a and b\n// are deeply equal to each other. In terms of the definition above, the\n// canon.admit method is the \"function or procedure which projects every\"\n// object \"onto that one element, the canonical form.\"\n//\n// In the worst case, the canonicalization process may involve looking at\n// every property in the provided object tree, so it takes the same order\n// of time as deep equality checking. Fortunately, already-canonicalized\n// objects are returned immediately from canon.admit, so the presence of\n// canonical subtrees tends to speed up canonicalization.\n//\n// Since consumers of canonical objects can check for deep equality in\n// constant time, canonicalizing cache results can massively improve the\n// performance of application code that skips re-rendering unchanged\n// results, such as \"pure\" UI components in a framework like React.\n//\n// Of course, since canonical objects may be shared widely between\n// unrelated consumers, it's important to think of them as immutable, even\n// though they are not actually frozen with Object.freeze in production,\n// due to the extra performance overhead that comes with frozen objects.\n//\n// Custom scalar objects whose internal class name is neither Array nor\n// Object can be included safely in the admitted tree, but they will not\n// be replaced with a canonical version (to put it another way, they are\n// assumed to be canonical already).\n//\n// If we ignore custom objects, no detection of cycles or repeated object\n// references is currently required by the StoreReader class, since\n// GraphQL result objects are JSON-serializable trees (and thus contain\n// neither cycles nor repeated subtrees), so we can avoid the complexity\n// of keeping track of objects we've already seen during the recursion of\n// the admit method.\n//\n// In the future, we may consider adding additional cases to the switch\n// statement to handle other common object types, such as \"[object Date]\"\n// objects, as needed.\nvar ObjectCanon = /** @class */ (function () {\n function ObjectCanon() {\n // Set of all canonical objects this ObjectCanon has admitted, allowing\n // canon.admit to return previously-canonicalized objects immediately.\n this.known = new (canUseWeakSet ? WeakSet : Set)();\n // Efficient storage/lookup structure for canonical objects.\n this.pool = new Trie(canUseWeakMap);\n // Make the ObjectCanon assume this value has already been\n // canonicalized.\n this.passes = new WeakMap();\n // Arrays that contain the same elements in a different order can share\n // the same SortedKeysInfo object, to save memory.\n this.keysByJSON = new Map();\n // This has to come last because it depends on keysByJSON.\n this.empty = this.admit({});\n }\n ObjectCanon.prototype.isKnown = function (value) {\n return isObjectOrArray(value) && this.known.has(value);\n };\n ObjectCanon.prototype.pass = function (value) {\n if (isObjectOrArray(value)) {\n var copy = shallowCopy(value);\n this.passes.set(copy, value);\n return copy;\n }\n return value;\n };\n ObjectCanon.prototype.admit = function (value) {\n var _this = this;\n if (isObjectOrArray(value)) {\n var original = this.passes.get(value);\n if (original)\n return original;\n var proto = Object.getPrototypeOf(value);\n switch (proto) {\n case Array.prototype: {\n if (this.known.has(value))\n return value;\n var array = value.map(this.admit, this);\n // Arrays are looked up in the Trie using their recursively\n // canonicalized elements, and the known version of the array is\n // preserved as node.array.\n var node = this.pool.lookupArray(array);\n if (!node.array) {\n this.known.add((node.array = array));\n // Since canonical arrays may be shared widely between\n // unrelated consumers, it's important to regard them as\n // immutable, even if they are not frozen in production.\n if (globalThis.__DEV__ !== false) {\n Object.freeze(array);\n }\n }\n return node.array;\n }\n case null:\n case Object.prototype: {\n if (this.known.has(value))\n return value;\n var proto_1 = Object.getPrototypeOf(value);\n var array_1 = [proto_1];\n var keys = this.sortedKeys(value);\n array_1.push(keys.json);\n var firstValueIndex_1 = array_1.length;\n keys.sorted.forEach(function (key) {\n array_1.push(_this.admit(value[key]));\n });\n // Objects are looked up in the Trie by their prototype (which\n // is *not* recursively canonicalized), followed by a JSON\n // representation of their (sorted) keys, followed by the\n // sequence of recursively canonicalized values corresponding to\n // those keys. To keep the final results unambiguous with other\n // sequences (such as arrays that just happen to contain [proto,\n // keys.json, value1, value2, ...]), the known version of the\n // object is stored as node.object.\n var node = this.pool.lookupArray(array_1);\n if (!node.object) {\n var obj_1 = (node.object = Object.create(proto_1));\n this.known.add(obj_1);\n keys.sorted.forEach(function (key, i) {\n obj_1[key] = array_1[firstValueIndex_1 + i];\n });\n // Since canonical objects may be shared widely between\n // unrelated consumers, it's important to regard them as\n // immutable, even if they are not frozen in production.\n if (globalThis.__DEV__ !== false) {\n Object.freeze(obj_1);\n }\n }\n return node.object;\n }\n }\n }\n return value;\n };\n // It's worthwhile to cache the sorting of arrays of strings, since the\n // same initial unsorted arrays tend to be encountered many times.\n // Fortunately, we can reuse the Trie machinery to look up the sorted\n // arrays in linear time (which is faster than sorting large arrays).\n ObjectCanon.prototype.sortedKeys = function (obj) {\n var keys = Object.keys(obj);\n var node = this.pool.lookupArray(keys);\n if (!node.keys) {\n keys.sort();\n var json = JSON.stringify(keys);\n if (!(node.keys = this.keysByJSON.get(json))) {\n this.keysByJSON.set(json, (node.keys = { sorted: keys, json: json }));\n }\n }\n return node.keys;\n };\n return ObjectCanon;\n}());\nexport { ObjectCanon };\n//# sourceMappingURL=object-canon.js.map","import { __assign } from \"tslib\";\nimport { invariant, newInvariantError } from \"../../utilities/globals/index.js\";\nimport { Kind } from \"graphql\";\nimport { wrap } from \"optimism\";\nimport { isField, resultKeyNameFromField, isReference, makeReference, shouldInclude, addTypenameToDocument, getDefaultValues, getMainDefinition, getQueryDefinition, getFragmentFromSelection, maybeDeepFreeze, mergeDeepArray, DeepMerger, isNonNullObject, canUseWeakMap, compact, canonicalStringify, cacheSizes, } from \"../../utilities/index.js\";\nimport { maybeDependOnExistenceOfEntity, supportsResultCaching, } from \"./entityStore.js\";\nimport { isArray, extractFragmentContext, getTypenameFromStoreObject, shouldCanonizeResults, } from \"./helpers.js\";\nimport { MissingFieldError } from \"../core/types/common.js\";\nimport { ObjectCanon } from \"./object-canon.js\";\nfunction execSelectionSetKeyArgs(options) {\n return [\n options.selectionSet,\n options.objectOrReference,\n options.context,\n // We split out this property so we can pass different values\n // independently without modifying options.context itself.\n options.context.canonizeResults,\n ];\n}\nvar StoreReader = /** @class */ (function () {\n function StoreReader(config) {\n var _this = this;\n this.knownResults = new (canUseWeakMap ? WeakMap : Map)();\n this.config = compact(config, {\n addTypename: config.addTypename !== false,\n canonizeResults: shouldCanonizeResults(config),\n });\n this.canon = config.canon || new ObjectCanon();\n // memoized functions in this class will be \"garbage-collected\"\n // by recreating the whole `StoreReader` in\n // `InMemoryCache.resetResultsCache`\n // (triggered from `InMemoryCache.gc` with `resetResultCache: true`)\n this.executeSelectionSet = wrap(function (options) {\n var _a;\n var canonizeResults = options.context.canonizeResults;\n var peekArgs = execSelectionSetKeyArgs(options);\n // Negate this boolean option so we can find out if we've already read\n // this result using the other boolean value.\n peekArgs[3] = !canonizeResults;\n var other = (_a = _this.executeSelectionSet).peek.apply(_a, peekArgs);\n if (other) {\n if (canonizeResults) {\n return __assign(__assign({}, other), { \n // If we previously read this result without canonizing it, we can\n // reuse that result simply by canonizing it now.\n result: _this.canon.admit(other.result) });\n }\n // If we previously read this result with canonization enabled, we can\n // return that canonized result as-is.\n return other;\n }\n maybeDependOnExistenceOfEntity(options.context.store, options.enclosingRef.__ref);\n // Finally, if we didn't find any useful previous results, run the real\n // execSelectionSetImpl method with the given options.\n return _this.execSelectionSetImpl(options);\n }, {\n max: this.config.resultCacheMaxSize ||\n cacheSizes[\"inMemoryCache.executeSelectionSet\"] ||\n 50000 /* defaultCacheSizes[\"inMemoryCache.executeSelectionSet\"] */,\n keyArgs: execSelectionSetKeyArgs,\n // Note that the parameters of makeCacheKey are determined by the\n // array returned by keyArgs.\n makeCacheKey: function (selectionSet, parent, context, canonizeResults) {\n if (supportsResultCaching(context.store)) {\n return context.store.makeCacheKey(selectionSet, isReference(parent) ? parent.__ref : parent, context.varString, canonizeResults);\n }\n },\n });\n this.executeSubSelectedArray = wrap(function (options) {\n maybeDependOnExistenceOfEntity(options.context.store, options.enclosingRef.__ref);\n return _this.execSubSelectedArrayImpl(options);\n }, {\n max: this.config.resultCacheMaxSize ||\n cacheSizes[\"inMemoryCache.executeSubSelectedArray\"] ||\n 10000 /* defaultCacheSizes[\"inMemoryCache.executeSubSelectedArray\"] */,\n makeCacheKey: function (_a) {\n var field = _a.field, array = _a.array, context = _a.context;\n if (supportsResultCaching(context.store)) {\n return context.store.makeCacheKey(field, array, context.varString);\n }\n },\n });\n }\n StoreReader.prototype.resetCanon = function () {\n this.canon = new ObjectCanon();\n };\n /**\n * Given a store and a query, return as much of the result as possible and\n * identify if any data was missing from the store.\n */\n StoreReader.prototype.diffQueryAgainstStore = function (_a) {\n var store = _a.store, query = _a.query, _b = _a.rootId, rootId = _b === void 0 ? \"ROOT_QUERY\" : _b, variables = _a.variables, _c = _a.returnPartialData, returnPartialData = _c === void 0 ? true : _c, _d = _a.canonizeResults, canonizeResults = _d === void 0 ? this.config.canonizeResults : _d;\n var policies = this.config.cache.policies;\n variables = __assign(__assign({}, getDefaultValues(getQueryDefinition(query))), variables);\n var rootRef = makeReference(rootId);\n var execResult = this.executeSelectionSet({\n selectionSet: getMainDefinition(query).selectionSet,\n objectOrReference: rootRef,\n enclosingRef: rootRef,\n context: __assign({ store: store, query: query, policies: policies, variables: variables, varString: canonicalStringify(variables), canonizeResults: canonizeResults }, extractFragmentContext(query, this.config.fragments)),\n });\n var missing;\n if (execResult.missing) {\n // For backwards compatibility we still report an array of\n // MissingFieldError objects, even though there will only ever be at most\n // one of them, now that all missing field error messages are grouped\n // together in the execResult.missing tree.\n missing = [\n new MissingFieldError(firstMissing(execResult.missing), execResult.missing, query, variables),\n ];\n if (!returnPartialData) {\n throw missing[0];\n }\n }\n return {\n result: execResult.result,\n complete: !missing,\n missing: missing,\n };\n };\n StoreReader.prototype.isFresh = function (result, parent, selectionSet, context) {\n if (supportsResultCaching(context.store) &&\n this.knownResults.get(result) === selectionSet) {\n var latest = this.executeSelectionSet.peek(selectionSet, parent, context, \n // If result is canonical, then it could only have been previously\n // cached by the canonizing version of executeSelectionSet, so we can\n // avoid checking both possibilities here.\n this.canon.isKnown(result));\n if (latest && result === latest.result) {\n return true;\n }\n }\n return false;\n };\n // Uncached version of executeSelectionSet.\n StoreReader.prototype.execSelectionSetImpl = function (_a) {\n var _this = this;\n var selectionSet = _a.selectionSet, objectOrReference = _a.objectOrReference, enclosingRef = _a.enclosingRef, context = _a.context;\n if (isReference(objectOrReference) &&\n !context.policies.rootTypenamesById[objectOrReference.__ref] &&\n !context.store.has(objectOrReference.__ref)) {\n return {\n result: this.canon.empty,\n missing: \"Dangling reference to missing \".concat(objectOrReference.__ref, \" object\"),\n };\n }\n var variables = context.variables, policies = context.policies, store = context.store;\n var typename = store.getFieldValue(objectOrReference, \"__typename\");\n var objectsToMerge = [];\n var missing;\n var missingMerger = new DeepMerger();\n if (this.config.addTypename &&\n typeof typename === \"string\" &&\n !policies.rootIdsByTypename[typename]) {\n // Ensure we always include a default value for the __typename\n // field, if we have one, and this.config.addTypename is true. Note\n // that this field can be overridden by other merged objects.\n objectsToMerge.push({ __typename: typename });\n }\n function handleMissing(result, resultName) {\n var _a;\n if (result.missing) {\n missing = missingMerger.merge(missing, (_a = {},\n _a[resultName] = result.missing,\n _a));\n }\n return result.result;\n }\n var workSet = new Set(selectionSet.selections);\n workSet.forEach(function (selection) {\n var _a, _b;\n // Omit fields with directives @skip(if: ) or\n // @include(if: ).\n if (!shouldInclude(selection, variables))\n return;\n if (isField(selection)) {\n var fieldValue = policies.readField({\n fieldName: selection.name.value,\n field: selection,\n variables: context.variables,\n from: objectOrReference,\n }, context);\n var resultName = resultKeyNameFromField(selection);\n if (fieldValue === void 0) {\n if (!addTypenameToDocument.added(selection)) {\n missing = missingMerger.merge(missing, (_a = {},\n _a[resultName] = \"Can't find field '\".concat(selection.name.value, \"' on \").concat(isReference(objectOrReference) ?\n objectOrReference.__ref + \" object\"\n : \"object \" + JSON.stringify(objectOrReference, null, 2)),\n _a));\n }\n }\n else if (isArray(fieldValue)) {\n if (fieldValue.length > 0) {\n fieldValue = handleMissing(_this.executeSubSelectedArray({\n field: selection,\n array: fieldValue,\n enclosingRef: enclosingRef,\n context: context,\n }), resultName);\n }\n }\n else if (!selection.selectionSet) {\n // If the field does not have a selection set, then we handle it\n // as a scalar value. To keep this.canon from canonicalizing\n // this value, we use this.canon.pass to wrap fieldValue in a\n // Pass object that this.canon.admit will later unwrap as-is.\n if (context.canonizeResults) {\n fieldValue = _this.canon.pass(fieldValue);\n }\n }\n else if (fieldValue != null) {\n // In this case, because we know the field has a selection set,\n // it must be trying to query a GraphQLObjectType, which is why\n // fieldValue must be != null.\n fieldValue = handleMissing(_this.executeSelectionSet({\n selectionSet: selection.selectionSet,\n objectOrReference: fieldValue,\n enclosingRef: isReference(fieldValue) ? fieldValue : enclosingRef,\n context: context,\n }), resultName);\n }\n if (fieldValue !== void 0) {\n objectsToMerge.push((_b = {}, _b[resultName] = fieldValue, _b));\n }\n }\n else {\n var fragment = getFragmentFromSelection(selection, context.lookupFragment);\n if (!fragment && selection.kind === Kind.FRAGMENT_SPREAD) {\n throw newInvariantError(9, selection.name.value);\n }\n if (fragment && policies.fragmentMatches(fragment, typename)) {\n fragment.selectionSet.selections.forEach(workSet.add, workSet);\n }\n }\n });\n var result = mergeDeepArray(objectsToMerge);\n var finalResult = { result: result, missing: missing };\n var frozen = context.canonizeResults ?\n this.canon.admit(finalResult)\n // Since this.canon is normally responsible for freezing results (only in\n // development), freeze them manually if canonization is disabled.\n : maybeDeepFreeze(finalResult);\n // Store this result with its selection set so that we can quickly\n // recognize it again in the StoreReader#isFresh method.\n if (frozen.result) {\n this.knownResults.set(frozen.result, selectionSet);\n }\n return frozen;\n };\n // Uncached version of executeSubSelectedArray.\n StoreReader.prototype.execSubSelectedArrayImpl = function (_a) {\n var _this = this;\n var field = _a.field, array = _a.array, enclosingRef = _a.enclosingRef, context = _a.context;\n var missing;\n var missingMerger = new DeepMerger();\n function handleMissing(childResult, i) {\n var _a;\n if (childResult.missing) {\n missing = missingMerger.merge(missing, (_a = {}, _a[i] = childResult.missing, _a));\n }\n return childResult.result;\n }\n if (field.selectionSet) {\n array = array.filter(context.store.canRead);\n }\n array = array.map(function (item, i) {\n // null value in array\n if (item === null) {\n return null;\n }\n // This is a nested array, recurse\n if (isArray(item)) {\n return handleMissing(_this.executeSubSelectedArray({\n field: field,\n array: item,\n enclosingRef: enclosingRef,\n context: context,\n }), i);\n }\n // This is an object, run the selection set on it\n if (field.selectionSet) {\n return handleMissing(_this.executeSelectionSet({\n selectionSet: field.selectionSet,\n objectOrReference: item,\n enclosingRef: isReference(item) ? item : enclosingRef,\n context: context,\n }), i);\n }\n if (globalThis.__DEV__ !== false) {\n assertSelectionSetForIdValue(context.store, field, item);\n }\n return item;\n });\n return {\n result: context.canonizeResults ? this.canon.admit(array) : array,\n missing: missing,\n };\n };\n return StoreReader;\n}());\nexport { StoreReader };\nfunction firstMissing(tree) {\n try {\n JSON.stringify(tree, function (_, value) {\n if (typeof value === \"string\")\n throw value;\n return value;\n });\n }\n catch (result) {\n return result;\n }\n}\nfunction assertSelectionSetForIdValue(store, field, fieldValue) {\n if (!field.selectionSet) {\n var workSet_1 = new Set([fieldValue]);\n workSet_1.forEach(function (value) {\n if (isNonNullObject(value)) {\n invariant(\n !isReference(value),\n 10,\n getTypenameFromStoreObject(store, value),\n field.name.value\n );\n Object.values(value).forEach(workSet_1.add, workSet_1);\n }\n });\n }\n}\n//# sourceMappingURL=readFromStore.js.map","import { invariant } from \"../../utilities/globals/index.js\";\nimport { argumentsObjectFromField, DeepMerger, isNonEmptyArray, isNonNullObject, } from \"../../utilities/index.js\";\nimport { hasOwn, isArray } from \"./helpers.js\";\n// Mapping from JSON-encoded KeySpecifier strings to associated information.\nvar specifierInfoCache = Object.create(null);\nfunction lookupSpecifierInfo(spec) {\n // It's safe to encode KeySpecifier arrays with JSON.stringify, since they're\n // just arrays of strings or nested KeySpecifier arrays, and the order of the\n // array elements is important (and suitably preserved by JSON.stringify).\n var cacheKey = JSON.stringify(spec);\n return (specifierInfoCache[cacheKey] ||\n (specifierInfoCache[cacheKey] = Object.create(null)));\n}\nexport function keyFieldsFnFromSpecifier(specifier) {\n var info = lookupSpecifierInfo(specifier);\n return (info.keyFieldsFn || (info.keyFieldsFn = function (object, context) {\n var extract = function (from, key) {\n return context.readField(key, from);\n };\n var keyObject = (context.keyObject = collectSpecifierPaths(specifier, function (schemaKeyPath) {\n var extracted = extractKeyPath(context.storeObject, schemaKeyPath, \n // Using context.readField to extract paths from context.storeObject\n // allows the extraction to see through Reference objects and respect\n // custom read functions.\n extract);\n if (extracted === void 0 &&\n object !== context.storeObject &&\n hasOwn.call(object, schemaKeyPath[0])) {\n // If context.storeObject fails to provide a value for the requested\n // path, fall back to the raw result object, if it has a top-level key\n // matching the first key in the path (schemaKeyPath[0]). This allows\n // key fields included in the written data to be saved in the cache\n // even if they are not selected explicitly in context.selectionSet.\n // Not being mentioned by context.selectionSet is convenient here,\n // since it means these extra fields cannot be affected by field\n // aliasing, which is why we can use extractKey instead of\n // context.readField for this extraction.\n extracted = extractKeyPath(object, schemaKeyPath, extractKey);\n }\n invariant(extracted !== void 0, 4, schemaKeyPath.join(\".\"), object);\n return extracted;\n }));\n return \"\".concat(context.typename, \":\").concat(JSON.stringify(keyObject));\n }));\n}\n// The keyArgs extraction process is roughly analogous to keyFields extraction,\n// but there are no aliases involved, missing fields are tolerated (by merely\n// omitting them from the key), and drawing from field.directives or variables\n// is allowed (in addition to drawing from the field's arguments object).\n// Concretely, these differences mean passing a different key path extractor\n// function to collectSpecifierPaths, reusing the shared extractKeyPath helper\n// wherever possible.\nexport function keyArgsFnFromSpecifier(specifier) {\n var info = lookupSpecifierInfo(specifier);\n return (info.keyArgsFn ||\n (info.keyArgsFn = function (args, _a) {\n var field = _a.field, variables = _a.variables, fieldName = _a.fieldName;\n var collected = collectSpecifierPaths(specifier, function (keyPath) {\n var firstKey = keyPath[0];\n var firstChar = firstKey.charAt(0);\n if (firstChar === \"@\") {\n if (field && isNonEmptyArray(field.directives)) {\n var directiveName_1 = firstKey.slice(1);\n // If the directive appears multiple times, only the first\n // occurrence's arguments will be used. TODO Allow repetition?\n // TODO Cache this work somehow, a la aliasMap?\n var d = field.directives.find(function (d) { return d.name.value === directiveName_1; });\n // Fortunately argumentsObjectFromField works for DirectiveNode!\n var directiveArgs = d && argumentsObjectFromField(d, variables);\n // For directives without arguments (d defined, but directiveArgs ===\n // null), the presence or absence of the directive still counts as\n // part of the field key, so we return null in those cases. If no\n // directive with this name was found for this field (d undefined and\n // thus directiveArgs undefined), we return undefined, which causes\n // this value to be omitted from the key object returned by\n // collectSpecifierPaths.\n return (directiveArgs &&\n extractKeyPath(directiveArgs, \n // If keyPath.length === 1, this code calls extractKeyPath with an\n // empty path, which works because it uses directiveArgs as the\n // extracted value.\n keyPath.slice(1)));\n }\n // If the key started with @ but there was no corresponding directive,\n // we want to omit this value from the key object, not fall through to\n // treating @whatever as a normal argument name.\n return;\n }\n if (firstChar === \"$\") {\n var variableName = firstKey.slice(1);\n if (variables && hasOwn.call(variables, variableName)) {\n var varKeyPath = keyPath.slice(0);\n varKeyPath[0] = variableName;\n return extractKeyPath(variables, varKeyPath);\n }\n // If the key started with $ but there was no corresponding variable, we\n // want to omit this value from the key object, not fall through to\n // treating $whatever as a normal argument name.\n return;\n }\n if (args) {\n return extractKeyPath(args, keyPath);\n }\n });\n var suffix = JSON.stringify(collected);\n // If no arguments were passed to this field, and it didn't have any other\n // field key contributions from directives or variables, hide the empty\n // :{} suffix from the field key. However, a field passed no arguments can\n // still end up with a non-empty :{...} suffix if its key configuration\n // refers to directives or variables.\n if (args || suffix !== \"{}\") {\n fieldName += \":\" + suffix;\n }\n return fieldName;\n }));\n}\nexport function collectSpecifierPaths(specifier, extractor) {\n // For each path specified by specifier, invoke the extractor, and repeatedly\n // merge the results together, with appropriate ancestor context.\n var merger = new DeepMerger();\n return getSpecifierPaths(specifier).reduce(function (collected, path) {\n var _a;\n var toMerge = extractor(path);\n if (toMerge !== void 0) {\n // This path is not expected to contain array indexes, so the toMerge\n // reconstruction will not contain arrays. TODO Fix this?\n for (var i = path.length - 1; i >= 0; --i) {\n toMerge = (_a = {}, _a[path[i]] = toMerge, _a);\n }\n collected = merger.merge(collected, toMerge);\n }\n return collected;\n }, Object.create(null));\n}\nexport function getSpecifierPaths(spec) {\n var info = lookupSpecifierInfo(spec);\n if (!info.paths) {\n var paths_1 = (info.paths = []);\n var currentPath_1 = [];\n spec.forEach(function (s, i) {\n if (isArray(s)) {\n getSpecifierPaths(s).forEach(function (p) { return paths_1.push(currentPath_1.concat(p)); });\n currentPath_1.length = 0;\n }\n else {\n currentPath_1.push(s);\n if (!isArray(spec[i + 1])) {\n paths_1.push(currentPath_1.slice(0));\n currentPath_1.length = 0;\n }\n }\n });\n }\n return info.paths;\n}\nfunction extractKey(object, key) {\n return object[key];\n}\nexport function extractKeyPath(object, path, extract) {\n // For each key in path, extract the corresponding child property from obj,\n // flattening arrays if encountered (uncommon for keyFields and keyArgs, but\n // possible). The final result of path.reduce is normalized so unexpected leaf\n // objects have their keys safely sorted. That final result is difficult to\n // type as anything other than any. You're welcome to try to improve the\n // return type, but keep in mind extractKeyPath is not a public function\n // (exported only for testing), so the effort may not be worthwhile unless the\n // limited set of actual callers (see above) pass arguments that TypeScript\n // can statically type. If we know only that path is some array of strings\n // (and not, say, a specific tuple of statically known strings), any (or\n // possibly unknown) is the honest answer.\n extract = extract || extractKey;\n return normalize(path.reduce(function reducer(obj, key) {\n return isArray(obj) ?\n obj.map(function (child) { return reducer(child, key); })\n : obj && extract(obj, key);\n }, object));\n}\nfunction normalize(value) {\n // Usually the extracted value will be a scalar value, since most primary\n // key fields are scalar, but just in case we get an object or an array, we\n // need to do some normalization of the order of (nested) keys.\n if (isNonNullObject(value)) {\n if (isArray(value)) {\n return value.map(normalize);\n }\n return collectSpecifierPaths(Object.keys(value).sort(), function (path) {\n return extractKeyPath(value, path);\n });\n }\n return value;\n}\n//# sourceMappingURL=key-extractor.js.map","import { __assign, __rest } from \"tslib\";\nimport { invariant, newInvariantError } from \"../../utilities/globals/index.js\";\nimport { storeKeyNameFromField, argumentsObjectFromField, isReference, getStoreKeyName, isNonNullObject, stringifyForDisplay, } from \"../../utilities/index.js\";\nimport { hasOwn, fieldNameFromStoreName, storeValueIsStoreObject, selectionSetMatchesResult, TypeOrFieldNameRegExp, defaultDataIdFromObject, isArray, } from \"./helpers.js\";\nimport { cacheSlot } from \"./reactiveVars.js\";\nimport { keyArgsFnFromSpecifier, keyFieldsFnFromSpecifier, } from \"./key-extractor.js\";\nfunction argsFromFieldSpecifier(spec) {\n return (spec.args !== void 0 ? spec.args\n : spec.field ? argumentsObjectFromField(spec.field, spec.variables)\n : null);\n}\nvar nullKeyFieldsFn = function () { return void 0; };\nvar simpleKeyArgsFn = function (_args, context) { return context.fieldName; };\n// These merge functions can be selected by specifying merge:true or\n// merge:false in a field policy.\nvar mergeTrueFn = function (existing, incoming, _a) {\n var mergeObjects = _a.mergeObjects;\n return mergeObjects(existing, incoming);\n};\nvar mergeFalseFn = function (_, incoming) { return incoming; };\nvar Policies = /** @class */ (function () {\n function Policies(config) {\n this.config = config;\n this.typePolicies = Object.create(null);\n this.toBeAdded = Object.create(null);\n // Map from subtype names to sets of supertype names. Note that this\n // representation inverts the structure of possibleTypes (whose keys are\n // supertypes and whose values are arrays of subtypes) because it tends\n // to be much more efficient to search upwards than downwards.\n this.supertypeMap = new Map();\n // Any fuzzy subtypes specified by possibleTypes will be converted to\n // RegExp objects and recorded here. Every key of this map can also be\n // found in supertypeMap. In many cases this Map will be empty, which\n // means no fuzzy subtype checking will happen in fragmentMatches.\n this.fuzzySubtypes = new Map();\n this.rootIdsByTypename = Object.create(null);\n this.rootTypenamesById = Object.create(null);\n this.usingPossibleTypes = false;\n this.config = __assign({ dataIdFromObject: defaultDataIdFromObject }, config);\n this.cache = this.config.cache;\n this.setRootTypename(\"Query\");\n this.setRootTypename(\"Mutation\");\n this.setRootTypename(\"Subscription\");\n if (config.possibleTypes) {\n this.addPossibleTypes(config.possibleTypes);\n }\n if (config.typePolicies) {\n this.addTypePolicies(config.typePolicies);\n }\n }\n Policies.prototype.identify = function (object, partialContext) {\n var _a;\n var policies = this;\n var typename = (partialContext &&\n (partialContext.typename || ((_a = partialContext.storeObject) === null || _a === void 0 ? void 0 : _a.__typename))) ||\n object.__typename;\n // It should be possible to write root Query fields with writeFragment,\n // using { __typename: \"Query\", ... } as the data, but it does not make\n // sense to allow the same identification behavior for the Mutation and\n // Subscription types, since application code should never be writing\n // directly to (or reading directly from) those root objects.\n if (typename === this.rootTypenamesById.ROOT_QUERY) {\n return [\"ROOT_QUERY\"];\n }\n // Default context.storeObject to object if not otherwise provided.\n var storeObject = (partialContext && partialContext.storeObject) || object;\n var context = __assign(__assign({}, partialContext), { typename: typename, storeObject: storeObject, readField: (partialContext && partialContext.readField) ||\n function () {\n var options = normalizeReadFieldOptions(arguments, storeObject);\n return policies.readField(options, {\n store: policies.cache[\"data\"],\n variables: options.variables,\n });\n } });\n var id;\n var policy = typename && this.getTypePolicy(typename);\n var keyFn = (policy && policy.keyFn) || this.config.dataIdFromObject;\n while (keyFn) {\n var specifierOrId = keyFn(__assign(__assign({}, object), storeObject), context);\n if (isArray(specifierOrId)) {\n keyFn = keyFieldsFnFromSpecifier(specifierOrId);\n }\n else {\n id = specifierOrId;\n break;\n }\n }\n id = id ? String(id) : void 0;\n return context.keyObject ? [id, context.keyObject] : [id];\n };\n Policies.prototype.addTypePolicies = function (typePolicies) {\n var _this = this;\n Object.keys(typePolicies).forEach(function (typename) {\n var _a = typePolicies[typename], queryType = _a.queryType, mutationType = _a.mutationType, subscriptionType = _a.subscriptionType, incoming = __rest(_a, [\"queryType\", \"mutationType\", \"subscriptionType\"]);\n // Though {query,mutation,subscription}Type configurations are rare,\n // it's important to call setRootTypename as early as possible,\n // since these configurations should apply consistently for the\n // entire lifetime of the cache. Also, since only one __typename can\n // qualify as one of these root types, these three properties cannot\n // be inherited, unlike the rest of the incoming properties. That\n // restriction is convenient, because the purpose of this.toBeAdded\n // is to delay the processing of type/field policies until the first\n // time they're used, allowing policies to be added in any order as\n // long as all relevant policies (including policies for supertypes)\n // have been added by the time a given policy is used for the first\n // time. In other words, since inheritance doesn't matter for these\n // properties, there's also no need to delay their processing using\n // the this.toBeAdded queue.\n if (queryType)\n _this.setRootTypename(\"Query\", typename);\n if (mutationType)\n _this.setRootTypename(\"Mutation\", typename);\n if (subscriptionType)\n _this.setRootTypename(\"Subscription\", typename);\n if (hasOwn.call(_this.toBeAdded, typename)) {\n _this.toBeAdded[typename].push(incoming);\n }\n else {\n _this.toBeAdded[typename] = [incoming];\n }\n });\n };\n Policies.prototype.updateTypePolicy = function (typename, incoming) {\n var _this = this;\n var existing = this.getTypePolicy(typename);\n var keyFields = incoming.keyFields, fields = incoming.fields;\n function setMerge(existing, merge) {\n existing.merge =\n typeof merge === \"function\" ? merge\n // Pass merge:true as a shorthand for a merge implementation\n // that returns options.mergeObjects(existing, incoming).\n : merge === true ? mergeTrueFn\n // Pass merge:false to make incoming always replace existing\n // without any warnings about data clobbering.\n : merge === false ? mergeFalseFn\n : existing.merge;\n }\n // Type policies can define merge functions, as an alternative to\n // using field policies to merge child objects.\n setMerge(existing, incoming.merge);\n existing.keyFn =\n // Pass false to disable normalization for this typename.\n keyFields === false ? nullKeyFieldsFn\n // Pass an array of strings to use those fields to compute a\n // composite ID for objects of this typename.\n : isArray(keyFields) ? keyFieldsFnFromSpecifier(keyFields)\n // Pass a function to take full control over identification.\n : typeof keyFields === \"function\" ? keyFields\n // Leave existing.keyFn unchanged if above cases fail.\n : existing.keyFn;\n if (fields) {\n Object.keys(fields).forEach(function (fieldName) {\n var existing = _this.getFieldPolicy(typename, fieldName, true);\n var incoming = fields[fieldName];\n if (typeof incoming === \"function\") {\n existing.read = incoming;\n }\n else {\n var keyArgs = incoming.keyArgs, read = incoming.read, merge = incoming.merge;\n existing.keyFn =\n // Pass false to disable argument-based differentiation of\n // field identities.\n keyArgs === false ? simpleKeyArgsFn\n // Pass an array of strings to use named arguments to\n // compute a composite identity for the field.\n : isArray(keyArgs) ? keyArgsFnFromSpecifier(keyArgs)\n // Pass a function to take full control over field identity.\n : typeof keyArgs === \"function\" ? keyArgs\n // Leave existing.keyFn unchanged if above cases fail.\n : existing.keyFn;\n if (typeof read === \"function\") {\n existing.read = read;\n }\n setMerge(existing, merge);\n }\n if (existing.read && existing.merge) {\n // If we have both a read and a merge function, assume\n // keyArgs:false, because read and merge together can take\n // responsibility for interpreting arguments in and out. This\n // default assumption can always be overridden by specifying\n // keyArgs explicitly in the FieldPolicy.\n existing.keyFn = existing.keyFn || simpleKeyArgsFn;\n }\n });\n }\n };\n Policies.prototype.setRootTypename = function (which, typename) {\n if (typename === void 0) { typename = which; }\n var rootId = \"ROOT_\" + which.toUpperCase();\n var old = this.rootTypenamesById[rootId];\n if (typename !== old) {\n invariant(!old || old === which, 5, which);\n // First, delete any old __typename associated with this rootId from\n // rootIdsByTypename.\n if (old)\n delete this.rootIdsByTypename[old];\n // Now make this the only __typename that maps to this rootId.\n this.rootIdsByTypename[typename] = rootId;\n // Finally, update the __typename associated with this rootId.\n this.rootTypenamesById[rootId] = typename;\n }\n };\n Policies.prototype.addPossibleTypes = function (possibleTypes) {\n var _this = this;\n this.usingPossibleTypes = true;\n Object.keys(possibleTypes).forEach(function (supertype) {\n // Make sure all types have an entry in this.supertypeMap, even if\n // their supertype set is empty, so we can return false immediately\n // from policies.fragmentMatches for unknown supertypes.\n _this.getSupertypeSet(supertype, true);\n possibleTypes[supertype].forEach(function (subtype) {\n _this.getSupertypeSet(subtype, true).add(supertype);\n var match = subtype.match(TypeOrFieldNameRegExp);\n if (!match || match[0] !== subtype) {\n // TODO Don't interpret just any invalid typename as a RegExp.\n _this.fuzzySubtypes.set(subtype, new RegExp(subtype));\n }\n });\n });\n };\n Policies.prototype.getTypePolicy = function (typename) {\n var _this = this;\n if (!hasOwn.call(this.typePolicies, typename)) {\n var policy_1 = (this.typePolicies[typename] = Object.create(null));\n policy_1.fields = Object.create(null);\n // When the TypePolicy for typename is first accessed, instead of\n // starting with an empty policy object, inherit any properties or\n // fields from the type policies of the supertypes of typename.\n //\n // Any properties or fields defined explicitly within the TypePolicy\n // for typename will take precedence, and if there are multiple\n // supertypes, the properties of policies whose types were added\n // later via addPossibleTypes will take precedence over those of\n // earlier supertypes. TODO Perhaps we should warn about these\n // conflicts in development, and recommend defining the property\n // explicitly in the subtype policy?\n //\n // Field policy inheritance is atomic/shallow: you can't inherit a\n // field policy and then override just its read function, since read\n // and merge functions often need to cooperate, so changing only one\n // of them would be a recipe for inconsistency.\n //\n // Once the TypePolicy for typename has been accessed, its properties can\n // still be updated directly using addTypePolicies, but future changes to\n // inherited supertype policies will not be reflected in this subtype\n // policy, because this code runs at most once per typename.\n var supertypes_1 = this.supertypeMap.get(typename);\n if (!supertypes_1 && this.fuzzySubtypes.size) {\n // To make the inheritance logic work for unknown typename strings that\n // may have fuzzy supertypes, we give this typename an empty supertype\n // set and then populate it with any fuzzy supertypes that match.\n supertypes_1 = this.getSupertypeSet(typename, true);\n // This only works for typenames that are directly matched by a fuzzy\n // supertype. What if there is an intermediate chain of supertypes?\n // While possible, that situation can only be solved effectively by\n // specifying the intermediate relationships via possibleTypes, manually\n // and in a non-fuzzy way.\n this.fuzzySubtypes.forEach(function (regExp, fuzzy) {\n if (regExp.test(typename)) {\n // The fuzzy parameter is just the original string version of regExp\n // (not a valid __typename string), but we can look up the\n // associated supertype(s) in this.supertypeMap.\n var fuzzySupertypes = _this.supertypeMap.get(fuzzy);\n if (fuzzySupertypes) {\n fuzzySupertypes.forEach(function (supertype) {\n return supertypes_1.add(supertype);\n });\n }\n }\n });\n }\n if (supertypes_1 && supertypes_1.size) {\n supertypes_1.forEach(function (supertype) {\n var _a = _this.getTypePolicy(supertype), fields = _a.fields, rest = __rest(_a, [\"fields\"]);\n Object.assign(policy_1, rest);\n Object.assign(policy_1.fields, fields);\n });\n }\n }\n var inbox = this.toBeAdded[typename];\n if (inbox && inbox.length) {\n // Merge the pending policies into this.typePolicies, in the order they\n // were originally passed to addTypePolicy.\n inbox.splice(0).forEach(function (policy) {\n _this.updateTypePolicy(typename, policy);\n });\n }\n return this.typePolicies[typename];\n };\n Policies.prototype.getFieldPolicy = function (typename, fieldName, createIfMissing) {\n if (typename) {\n var fieldPolicies = this.getTypePolicy(typename).fields;\n return (fieldPolicies[fieldName] ||\n (createIfMissing && (fieldPolicies[fieldName] = Object.create(null))));\n }\n };\n Policies.prototype.getSupertypeSet = function (subtype, createIfMissing) {\n var supertypeSet = this.supertypeMap.get(subtype);\n if (!supertypeSet && createIfMissing) {\n this.supertypeMap.set(subtype, (supertypeSet = new Set()));\n }\n return supertypeSet;\n };\n Policies.prototype.fragmentMatches = function (fragment, typename, result, variables) {\n var _this = this;\n if (!fragment.typeCondition)\n return true;\n // If the fragment has a type condition but the object we're matching\n // against does not have a __typename, the fragment cannot match.\n if (!typename)\n return false;\n var supertype = fragment.typeCondition.name.value;\n // Common case: fragment type condition and __typename are the same.\n if (typename === supertype)\n return true;\n if (this.usingPossibleTypes && this.supertypeMap.has(supertype)) {\n var typenameSupertypeSet = this.getSupertypeSet(typename, true);\n var workQueue_1 = [typenameSupertypeSet];\n var maybeEnqueue_1 = function (subtype) {\n var supertypeSet = _this.getSupertypeSet(subtype, false);\n if (supertypeSet &&\n supertypeSet.size &&\n workQueue_1.indexOf(supertypeSet) < 0) {\n workQueue_1.push(supertypeSet);\n }\n };\n // We need to check fuzzy subtypes only if we encountered fuzzy\n // subtype strings in addPossibleTypes, and only while writing to\n // the cache, since that's when selectionSetMatchesResult gives a\n // strong signal of fragment matching. The StoreReader class calls\n // policies.fragmentMatches without passing a result object, so\n // needToCheckFuzzySubtypes is always false while reading.\n var needToCheckFuzzySubtypes = !!(result && this.fuzzySubtypes.size);\n var checkingFuzzySubtypes = false;\n // It's important to keep evaluating workQueue.length each time through\n // the loop, because the queue can grow while we're iterating over it.\n for (var i = 0; i < workQueue_1.length; ++i) {\n var supertypeSet = workQueue_1[i];\n if (supertypeSet.has(supertype)) {\n if (!typenameSupertypeSet.has(supertype)) {\n if (checkingFuzzySubtypes) {\n globalThis.__DEV__ !== false && invariant.warn(6, typename, supertype);\n }\n // Record positive results for faster future lookup.\n // Unfortunately, we cannot safely cache negative results,\n // because new possibleTypes data could always be added to the\n // Policies class.\n typenameSupertypeSet.add(supertype);\n }\n return true;\n }\n supertypeSet.forEach(maybeEnqueue_1);\n if (needToCheckFuzzySubtypes &&\n // Start checking fuzzy subtypes only after exhausting all\n // non-fuzzy subtypes (after the final iteration of the loop).\n i === workQueue_1.length - 1 &&\n // We could wait to compare fragment.selectionSet to result\n // after we verify the supertype, but this check is often less\n // expensive than that search, and we will have to do the\n // comparison anyway whenever we find a potential match.\n selectionSetMatchesResult(fragment.selectionSet, result, variables)) {\n // We don't always need to check fuzzy subtypes (if no result\n // was provided, or !this.fuzzySubtypes.size), but, when we do,\n // we only want to check them once.\n needToCheckFuzzySubtypes = false;\n checkingFuzzySubtypes = true;\n // If we find any fuzzy subtypes that match typename, extend the\n // workQueue to search through the supertypes of those fuzzy\n // subtypes. Otherwise the for-loop will terminate and we'll\n // return false below.\n this.fuzzySubtypes.forEach(function (regExp, fuzzyString) {\n var match = typename.match(regExp);\n if (match && match[0] === typename) {\n maybeEnqueue_1(fuzzyString);\n }\n });\n }\n }\n }\n return false;\n };\n Policies.prototype.hasKeyArgs = function (typename, fieldName) {\n var policy = this.getFieldPolicy(typename, fieldName, false);\n return !!(policy && policy.keyFn);\n };\n Policies.prototype.getStoreFieldName = function (fieldSpec) {\n var typename = fieldSpec.typename, fieldName = fieldSpec.fieldName;\n var policy = this.getFieldPolicy(typename, fieldName, false);\n var storeFieldName;\n var keyFn = policy && policy.keyFn;\n if (keyFn && typename) {\n var context = {\n typename: typename,\n fieldName: fieldName,\n field: fieldSpec.field || null,\n variables: fieldSpec.variables,\n };\n var args = argsFromFieldSpecifier(fieldSpec);\n while (keyFn) {\n var specifierOrString = keyFn(args, context);\n if (isArray(specifierOrString)) {\n keyFn = keyArgsFnFromSpecifier(specifierOrString);\n }\n else {\n // If the custom keyFn returns a falsy value, fall back to\n // fieldName instead.\n storeFieldName = specifierOrString || fieldName;\n break;\n }\n }\n }\n if (storeFieldName === void 0) {\n storeFieldName =\n fieldSpec.field ?\n storeKeyNameFromField(fieldSpec.field, fieldSpec.variables)\n : getStoreKeyName(fieldName, argsFromFieldSpecifier(fieldSpec));\n }\n // Returning false from a keyArgs function is like configuring\n // keyArgs: false, but more dynamic.\n if (storeFieldName === false) {\n return fieldName;\n }\n // Make sure custom field names start with the actual field.name.value\n // of the field, so we can always figure out which properties of a\n // StoreObject correspond to which original field names.\n return fieldName === fieldNameFromStoreName(storeFieldName) ? storeFieldName\n : fieldName + \":\" + storeFieldName;\n };\n Policies.prototype.readField = function (options, context) {\n var objectOrReference = options.from;\n if (!objectOrReference)\n return;\n var nameOrField = options.field || options.fieldName;\n if (!nameOrField)\n return;\n if (options.typename === void 0) {\n var typename = context.store.getFieldValue(objectOrReference, \"__typename\");\n if (typename)\n options.typename = typename;\n }\n var storeFieldName = this.getStoreFieldName(options);\n var fieldName = fieldNameFromStoreName(storeFieldName);\n var existing = context.store.getFieldValue(objectOrReference, storeFieldName);\n var policy = this.getFieldPolicy(options.typename, fieldName, false);\n var read = policy && policy.read;\n if (read) {\n var readOptions = makeFieldFunctionOptions(this, objectOrReference, options, context, context.store.getStorage(isReference(objectOrReference) ?\n objectOrReference.__ref\n : objectOrReference, storeFieldName));\n // Call read(existing, readOptions) with cacheSlot holding this.cache.\n return cacheSlot.withValue(this.cache, read, [\n existing,\n readOptions,\n ]);\n }\n return existing;\n };\n Policies.prototype.getReadFunction = function (typename, fieldName) {\n var policy = this.getFieldPolicy(typename, fieldName, false);\n return policy && policy.read;\n };\n Policies.prototype.getMergeFunction = function (parentTypename, fieldName, childTypename) {\n var policy = this.getFieldPolicy(parentTypename, fieldName, false);\n var merge = policy && policy.merge;\n if (!merge && childTypename) {\n policy = this.getTypePolicy(childTypename);\n merge = policy && policy.merge;\n }\n return merge;\n };\n Policies.prototype.runMergeFunction = function (existing, incoming, _a, context, storage) {\n var field = _a.field, typename = _a.typename, merge = _a.merge;\n if (merge === mergeTrueFn) {\n // Instead of going to the trouble of creating a full\n // FieldFunctionOptions object and calling mergeTrueFn, we can\n // simply call mergeObjects, as mergeTrueFn would.\n return makeMergeObjectsFunction(context.store)(existing, incoming);\n }\n if (merge === mergeFalseFn) {\n // Likewise for mergeFalseFn, whose implementation is even simpler.\n return incoming;\n }\n // If cache.writeQuery or cache.writeFragment was called with\n // options.overwrite set to true, we still call merge functions, but\n // the existing data is always undefined, so the merge function will\n // not attempt to combine the incoming data with the existing data.\n if (context.overwrite) {\n existing = void 0;\n }\n return merge(existing, incoming, makeFieldFunctionOptions(this, \n // Unlike options.readField for read functions, we do not fall\n // back to the current object if no foreignObjOrRef is provided,\n // because it's not clear what the current object should be for\n // merge functions: the (possibly undefined) existing object, or\n // the incoming object? If you think your merge function needs\n // to read sibling fields in order to produce a new value for\n // the current field, you might want to rethink your strategy,\n // because that's a recipe for making merge behavior sensitive\n // to the order in which fields are written into the cache.\n // However, readField(name, ref) is useful for merge functions\n // that need to deduplicate child objects and references.\n void 0, {\n typename: typename,\n fieldName: field.name.value,\n field: field,\n variables: context.variables,\n }, context, storage || Object.create(null)));\n };\n return Policies;\n}());\nexport { Policies };\nfunction makeFieldFunctionOptions(policies, objectOrReference, fieldSpec, context, storage) {\n var storeFieldName = policies.getStoreFieldName(fieldSpec);\n var fieldName = fieldNameFromStoreName(storeFieldName);\n var variables = fieldSpec.variables || context.variables;\n var _a = context.store, toReference = _a.toReference, canRead = _a.canRead;\n return {\n args: argsFromFieldSpecifier(fieldSpec),\n field: fieldSpec.field || null,\n fieldName: fieldName,\n storeFieldName: storeFieldName,\n variables: variables,\n isReference: isReference,\n toReference: toReference,\n storage: storage,\n cache: policies.cache,\n canRead: canRead,\n readField: function () {\n return policies.readField(normalizeReadFieldOptions(arguments, objectOrReference, variables), context);\n },\n mergeObjects: makeMergeObjectsFunction(context.store),\n };\n}\nexport function normalizeReadFieldOptions(readFieldArgs, objectOrReference, variables) {\n var fieldNameOrOptions = readFieldArgs[0], from = readFieldArgs[1], argc = readFieldArgs.length;\n var options;\n if (typeof fieldNameOrOptions === \"string\") {\n options = {\n fieldName: fieldNameOrOptions,\n // Default to objectOrReference only when no second argument was\n // passed for the from parameter, not when undefined is explicitly\n // passed as the second argument.\n from: argc > 1 ? from : objectOrReference,\n };\n }\n else {\n options = __assign({}, fieldNameOrOptions);\n // Default to objectOrReference only when fieldNameOrOptions.from is\n // actually omitted, rather than just undefined.\n if (!hasOwn.call(options, \"from\")) {\n options.from = objectOrReference;\n }\n }\n if (globalThis.__DEV__ !== false && options.from === void 0) {\n globalThis.__DEV__ !== false && invariant.warn(7, stringifyForDisplay(Array.from(readFieldArgs)));\n }\n if (void 0 === options.variables) {\n options.variables = variables;\n }\n return options;\n}\nfunction makeMergeObjectsFunction(store) {\n return function mergeObjects(existing, incoming) {\n if (isArray(existing) || isArray(incoming)) {\n throw newInvariantError(8);\n }\n // These dynamic checks are necessary because the parameters of a\n // custom merge function can easily have the any type, so the type\n // system cannot always enforce the StoreObject | Reference parameter\n // types of options.mergeObjects.\n if (isNonNullObject(existing) && isNonNullObject(incoming)) {\n var eType = store.getFieldValue(existing, \"__typename\");\n var iType = store.getFieldValue(incoming, \"__typename\");\n var typesDiffer = eType && iType && eType !== iType;\n if (typesDiffer) {\n return incoming;\n }\n if (isReference(existing) && storeValueIsStoreObject(incoming)) {\n // Update the normalized EntityStore for the entity identified by\n // existing.__ref, preferring/overwriting any fields contributed by the\n // newer incoming StoreObject.\n store.merge(existing.__ref, incoming);\n return existing;\n }\n if (storeValueIsStoreObject(existing) && isReference(incoming)) {\n // Update the normalized EntityStore for the entity identified by\n // incoming.__ref, taking fields from the older existing object only if\n // those fields are not already present in the newer StoreObject\n // identified by incoming.__ref.\n store.merge(existing, incoming.__ref);\n return incoming;\n }\n if (storeValueIsStoreObject(existing) &&\n storeValueIsStoreObject(incoming)) {\n return __assign(__assign({}, existing), incoming);\n }\n }\n return incoming;\n };\n}\n//# sourceMappingURL=policies.js.map","import { __assign } from \"tslib\";\nimport { invariant, newInvariantError } from \"../../utilities/globals/index.js\";\nimport { equal } from \"@wry/equality\";\nimport { Trie } from \"@wry/trie\";\nimport { Kind } from \"graphql\";\nimport { getFragmentFromSelection, getDefaultValues, getOperationDefinition, getTypenameFromResult, makeReference, isField, resultKeyNameFromField, isReference, shouldInclude, cloneDeep, addTypenameToDocument, isNonEmptyArray, argumentsObjectFromField, canonicalStringify, } from \"../../utilities/index.js\";\nimport { isArray, makeProcessedFieldsMerger, fieldNameFromStoreName, storeValueIsStoreObject, extractFragmentContext, } from \"./helpers.js\";\nimport { normalizeReadFieldOptions } from \"./policies.js\";\n// Since there are only four possible combinations of context.clientOnly and\n// context.deferred values, we should need at most four \"flavors\" of any given\n// WriteContext. To avoid creating multiple copies of the same context, we cache\n// the contexts in the context.flavors Map (shared by all flavors) according to\n// their clientOnly and deferred values (always in that order).\nfunction getContextFlavor(context, clientOnly, deferred) {\n var key = \"\".concat(clientOnly).concat(deferred);\n var flavored = context.flavors.get(key);\n if (!flavored) {\n context.flavors.set(key, (flavored =\n context.clientOnly === clientOnly && context.deferred === deferred ?\n context\n : __assign(__assign({}, context), { clientOnly: clientOnly, deferred: deferred })));\n }\n return flavored;\n}\nvar StoreWriter = /** @class */ (function () {\n function StoreWriter(cache, reader, fragments) {\n this.cache = cache;\n this.reader = reader;\n this.fragments = fragments;\n }\n StoreWriter.prototype.writeToStore = function (store, _a) {\n var _this = this;\n var query = _a.query, result = _a.result, dataId = _a.dataId, variables = _a.variables, overwrite = _a.overwrite;\n var operationDefinition = getOperationDefinition(query);\n var merger = makeProcessedFieldsMerger();\n variables = __assign(__assign({}, getDefaultValues(operationDefinition)), variables);\n var context = __assign(__assign({ store: store, written: Object.create(null), merge: function (existing, incoming) {\n return merger.merge(existing, incoming);\n }, variables: variables, varString: canonicalStringify(variables) }, extractFragmentContext(query, this.fragments)), { overwrite: !!overwrite, incomingById: new Map(), clientOnly: false, deferred: false, flavors: new Map() });\n var ref = this.processSelectionSet({\n result: result || Object.create(null),\n dataId: dataId,\n selectionSet: operationDefinition.selectionSet,\n mergeTree: { map: new Map() },\n context: context,\n });\n if (!isReference(ref)) {\n throw newInvariantError(11, result);\n }\n // So far, the store has not been modified, so now it's time to process\n // context.incomingById and merge those incoming fields into context.store.\n context.incomingById.forEach(function (_a, dataId) {\n var storeObject = _a.storeObject, mergeTree = _a.mergeTree, fieldNodeSet = _a.fieldNodeSet;\n var entityRef = makeReference(dataId);\n if (mergeTree && mergeTree.map.size) {\n var applied = _this.applyMerges(mergeTree, entityRef, storeObject, context);\n if (isReference(applied)) {\n // Assume References returned by applyMerges have already been merged\n // into the store. See makeMergeObjectsFunction in policies.ts for an\n // example of how this can happen.\n return;\n }\n // Otherwise, applyMerges returned a StoreObject, whose fields we should\n // merge into the store (see store.merge statement below).\n storeObject = applied;\n }\n if (globalThis.__DEV__ !== false && !context.overwrite) {\n var fieldsWithSelectionSets_1 = Object.create(null);\n fieldNodeSet.forEach(function (field) {\n if (field.selectionSet) {\n fieldsWithSelectionSets_1[field.name.value] = true;\n }\n });\n var hasSelectionSet_1 = function (storeFieldName) {\n return fieldsWithSelectionSets_1[fieldNameFromStoreName(storeFieldName)] ===\n true;\n };\n var hasMergeFunction_1 = function (storeFieldName) {\n var childTree = mergeTree && mergeTree.map.get(storeFieldName);\n return Boolean(childTree && childTree.info && childTree.info.merge);\n };\n Object.keys(storeObject).forEach(function (storeFieldName) {\n // If a merge function was defined for this field, trust that it\n // did the right thing about (not) clobbering data. If the field\n // has no selection set, it's a scalar field, so it doesn't need\n // a merge function (even if it's an object, like JSON data).\n if (hasSelectionSet_1(storeFieldName) &&\n !hasMergeFunction_1(storeFieldName)) {\n warnAboutDataLoss(entityRef, storeObject, storeFieldName, context.store);\n }\n });\n }\n store.merge(dataId, storeObject);\n });\n // Any IDs written explicitly to the cache will be retained as\n // reachable root IDs for garbage collection purposes. Although this\n // logic includes root IDs like ROOT_QUERY and ROOT_MUTATION, their\n // retainment counts are effectively ignored because cache.gc() always\n // includes them in its root ID set.\n store.retain(ref.__ref);\n return ref;\n };\n StoreWriter.prototype.processSelectionSet = function (_a) {\n var _this = this;\n var dataId = _a.dataId, result = _a.result, selectionSet = _a.selectionSet, context = _a.context, \n // This object allows processSelectionSet to report useful information\n // to its callers without explicitly returning that information.\n mergeTree = _a.mergeTree;\n var policies = this.cache.policies;\n // This variable will be repeatedly updated using context.merge to\n // accumulate all fields that need to be written into the store.\n var incoming = Object.create(null);\n // If typename was not passed in, infer it. Note that typename is\n // always passed in for tricky-to-infer cases such as \"Query\" for\n // ROOT_QUERY.\n var typename = (dataId && policies.rootTypenamesById[dataId]) ||\n getTypenameFromResult(result, selectionSet, context.fragmentMap) ||\n (dataId && context.store.get(dataId, \"__typename\"));\n if (\"string\" === typeof typename) {\n incoming.__typename = typename;\n }\n // This readField function will be passed as context.readField in the\n // KeyFieldsContext object created within policies.identify (called below).\n // In addition to reading from the existing context.store (thanks to the\n // policies.readField(options, context) line at the very bottom), this\n // version of readField can read from Reference objects that are currently\n // pending in context.incomingById, which is important whenever keyFields\n // need to be extracted from a child object that processSelectionSet has\n // turned into a Reference.\n var readField = function () {\n var options = normalizeReadFieldOptions(arguments, incoming, context.variables);\n if (isReference(options.from)) {\n var info = context.incomingById.get(options.from.__ref);\n if (info) {\n var result_1 = policies.readField(__assign(__assign({}, options), { from: info.storeObject }), context);\n if (result_1 !== void 0) {\n return result_1;\n }\n }\n }\n return policies.readField(options, context);\n };\n var fieldNodeSet = new Set();\n this.flattenFields(selectionSet, result, \n // This WriteContext will be the default context value for fields returned\n // by the flattenFields method, but some fields may be assigned a modified\n // context, depending on the presence of @client and other directives.\n context, typename).forEach(function (context, field) {\n var _a;\n var resultFieldKey = resultKeyNameFromField(field);\n var value = result[resultFieldKey];\n fieldNodeSet.add(field);\n if (value !== void 0) {\n var storeFieldName = policies.getStoreFieldName({\n typename: typename,\n fieldName: field.name.value,\n field: field,\n variables: context.variables,\n });\n var childTree = getChildMergeTree(mergeTree, storeFieldName);\n var incomingValue = _this.processFieldValue(value, field, \n // Reset context.clientOnly and context.deferred to their default\n // values before processing nested selection sets.\n field.selectionSet ?\n getContextFlavor(context, false, false)\n : context, childTree);\n // To determine if this field holds a child object with a merge function\n // defined in its type policy (see PR #7070), we need to figure out the\n // child object's __typename.\n var childTypename = void 0;\n // The field's value can be an object that has a __typename only if the\n // field has a selection set. Otherwise incomingValue is scalar.\n if (field.selectionSet &&\n (isReference(incomingValue) || storeValueIsStoreObject(incomingValue))) {\n childTypename = readField(\"__typename\", incomingValue);\n }\n var merge = policies.getMergeFunction(typename, field.name.value, childTypename);\n if (merge) {\n childTree.info = {\n // TODO Check compatibility against any existing childTree.field?\n field: field,\n typename: typename,\n merge: merge,\n };\n }\n else {\n maybeRecycleChildMergeTree(mergeTree, storeFieldName);\n }\n incoming = context.merge(incoming, (_a = {},\n _a[storeFieldName] = incomingValue,\n _a));\n }\n else if (globalThis.__DEV__ !== false &&\n !context.clientOnly &&\n !context.deferred &&\n !addTypenameToDocument.added(field) &&\n // If the field has a read function, it may be a synthetic field or\n // provide a default value, so its absence from the written data should\n // not be cause for alarm.\n !policies.getReadFunction(typename, field.name.value)) {\n globalThis.__DEV__ !== false && invariant.error(12, resultKeyNameFromField(field), result);\n }\n });\n // Identify the result object, even if dataId was already provided,\n // since we always need keyObject below.\n try {\n var _b = policies.identify(result, {\n typename: typename,\n selectionSet: selectionSet,\n fragmentMap: context.fragmentMap,\n storeObject: incoming,\n readField: readField,\n }), id = _b[0], keyObject = _b[1];\n // If dataId was not provided, fall back to the id just generated by\n // policies.identify.\n dataId = dataId || id;\n // Write any key fields that were used during identification, even if\n // they were not mentioned in the original query.\n if (keyObject) {\n // TODO Reverse the order of the arguments?\n incoming = context.merge(incoming, keyObject);\n }\n }\n catch (e) {\n // If dataId was provided, tolerate failure of policies.identify.\n if (!dataId)\n throw e;\n }\n if (\"string\" === typeof dataId) {\n var dataRef = makeReference(dataId);\n // Avoid processing the same entity object using the same selection\n // set more than once. We use an array instead of a Set since most\n // entity IDs will be written using only one selection set, so the\n // size of this array is likely to be very small, meaning indexOf is\n // likely to be faster than Set.prototype.has.\n var sets = context.written[dataId] || (context.written[dataId] = []);\n if (sets.indexOf(selectionSet) >= 0)\n return dataRef;\n sets.push(selectionSet);\n // If we're about to write a result object into the store, but we\n // happen to know that the exact same (===) result object would be\n // returned if we were to reread the result with the same inputs,\n // then we can skip the rest of the processSelectionSet work for\n // this object, and immediately return a Reference to it.\n if (this.reader &&\n this.reader.isFresh(result, dataRef, selectionSet, context)) {\n return dataRef;\n }\n var previous_1 = context.incomingById.get(dataId);\n if (previous_1) {\n previous_1.storeObject = context.merge(previous_1.storeObject, incoming);\n previous_1.mergeTree = mergeMergeTrees(previous_1.mergeTree, mergeTree);\n fieldNodeSet.forEach(function (field) { return previous_1.fieldNodeSet.add(field); });\n }\n else {\n context.incomingById.set(dataId, {\n storeObject: incoming,\n // Save a reference to mergeTree only if it is not empty, because\n // empty MergeTrees may be recycled by maybeRecycleChildMergeTree and\n // reused for entirely different parts of the result tree.\n mergeTree: mergeTreeIsEmpty(mergeTree) ? void 0 : mergeTree,\n fieldNodeSet: fieldNodeSet,\n });\n }\n return dataRef;\n }\n return incoming;\n };\n StoreWriter.prototype.processFieldValue = function (value, field, context, mergeTree) {\n var _this = this;\n if (!field.selectionSet || value === null) {\n // In development, we need to clone scalar values so that they can be\n // safely frozen with maybeDeepFreeze in readFromStore.ts. In production,\n // it's cheaper to store the scalar values directly in the cache.\n return globalThis.__DEV__ !== false ? cloneDeep(value) : value;\n }\n if (isArray(value)) {\n return value.map(function (item, i) {\n var value = _this.processFieldValue(item, field, context, getChildMergeTree(mergeTree, i));\n maybeRecycleChildMergeTree(mergeTree, i);\n return value;\n });\n }\n return this.processSelectionSet({\n result: value,\n selectionSet: field.selectionSet,\n context: context,\n mergeTree: mergeTree,\n });\n };\n // Implements https://spec.graphql.org/draft/#sec-Field-Collection, but with\n // some additions for tracking @client and @defer directives.\n StoreWriter.prototype.flattenFields = function (selectionSet, result, context, typename) {\n if (typename === void 0) { typename = getTypenameFromResult(result, selectionSet, context.fragmentMap); }\n var fieldMap = new Map();\n var policies = this.cache.policies;\n var limitingTrie = new Trie(false); // No need for WeakMap, since limitingTrie does not escape.\n (function flatten(selectionSet, inheritedContext) {\n var visitedNode = limitingTrie.lookup(selectionSet, \n // Because we take inheritedClientOnly and inheritedDeferred into\n // consideration here (in addition to selectionSet), it's possible for\n // the same selection set to be flattened more than once, if it appears\n // in the query with different @client and/or @directive configurations.\n inheritedContext.clientOnly, inheritedContext.deferred);\n if (visitedNode.visited)\n return;\n visitedNode.visited = true;\n selectionSet.selections.forEach(function (selection) {\n if (!shouldInclude(selection, context.variables))\n return;\n var clientOnly = inheritedContext.clientOnly, deferred = inheritedContext.deferred;\n if (\n // Since the presence of @client or @defer on this field can only\n // cause clientOnly or deferred to become true, we can skip the\n // forEach loop if both clientOnly and deferred are already true.\n !(clientOnly && deferred) &&\n isNonEmptyArray(selection.directives)) {\n selection.directives.forEach(function (dir) {\n var name = dir.name.value;\n if (name === \"client\")\n clientOnly = true;\n if (name === \"defer\") {\n var args = argumentsObjectFromField(dir, context.variables);\n // The @defer directive takes an optional args.if boolean\n // argument, similar to @include(if: boolean). Note that\n // @defer(if: false) does not make context.deferred false, but\n // instead behaves as if there was no @defer directive.\n if (!args || args.if !== false) {\n deferred = true;\n }\n // TODO In the future, we may want to record args.label using\n // context.deferred, if a label is specified.\n }\n });\n }\n if (isField(selection)) {\n var existing = fieldMap.get(selection);\n if (existing) {\n // If this field has been visited along another recursive path\n // before, the final context should have clientOnly or deferred set\n // to true only if *all* paths have the directive (hence the &&).\n clientOnly = clientOnly && existing.clientOnly;\n deferred = deferred && existing.deferred;\n }\n fieldMap.set(selection, getContextFlavor(context, clientOnly, deferred));\n }\n else {\n var fragment = getFragmentFromSelection(selection, context.lookupFragment);\n if (!fragment && selection.kind === Kind.FRAGMENT_SPREAD) {\n throw newInvariantError(13, selection.name.value);\n }\n if (fragment &&\n policies.fragmentMatches(fragment, typename, result, context.variables)) {\n flatten(fragment.selectionSet, getContextFlavor(context, clientOnly, deferred));\n }\n }\n });\n })(selectionSet, context);\n return fieldMap;\n };\n StoreWriter.prototype.applyMerges = function (mergeTree, existing, incoming, context, getStorageArgs) {\n var _a;\n var _this = this;\n if (mergeTree.map.size && !isReference(incoming)) {\n var e_1 = \n // Items in the same position in different arrays are not\n // necessarily related to each other, so when incoming is an array\n // we process its elements as if there was no existing data.\n (!isArray(incoming) &&\n // Likewise, existing must be either a Reference or a StoreObject\n // in order for its fields to be safe to merge with the fields of\n // the incoming object.\n (isReference(existing) || storeValueIsStoreObject(existing))) ?\n existing\n : void 0;\n // This narrowing is implied by mergeTree.map.size > 0 and\n // !isReference(incoming), though TypeScript understandably cannot\n // hope to infer this type.\n var i_1 = incoming;\n // The options.storage objects provided to read and merge functions\n // are derived from the identity of the parent object plus a\n // sequence of storeFieldName strings/numbers identifying the nested\n // field name path of each field value to be merged.\n if (e_1 && !getStorageArgs) {\n getStorageArgs = [isReference(e_1) ? e_1.__ref : e_1];\n }\n // It's possible that applying merge functions to this subtree will\n // not change the incoming data, so this variable tracks the fields\n // that did change, so we can create a new incoming object when (and\n // only when) at least one incoming field has changed. We use a Map\n // to preserve the type of numeric keys.\n var changedFields_1;\n var getValue_1 = function (from, name) {\n return (isArray(from) ?\n typeof name === \"number\" ?\n from[name]\n : void 0\n : context.store.getFieldValue(from, String(name)));\n };\n mergeTree.map.forEach(function (childTree, storeFieldName) {\n var eVal = getValue_1(e_1, storeFieldName);\n var iVal = getValue_1(i_1, storeFieldName);\n // If we have no incoming data, leave any existing data untouched.\n if (void 0 === iVal)\n return;\n if (getStorageArgs) {\n getStorageArgs.push(storeFieldName);\n }\n var aVal = _this.applyMerges(childTree, eVal, iVal, context, getStorageArgs);\n if (aVal !== iVal) {\n changedFields_1 = changedFields_1 || new Map();\n changedFields_1.set(storeFieldName, aVal);\n }\n if (getStorageArgs) {\n invariant(getStorageArgs.pop() === storeFieldName);\n }\n });\n if (changedFields_1) {\n // Shallow clone i so we can add changed fields to it.\n incoming = (isArray(i_1) ? i_1.slice(0) : __assign({}, i_1));\n changedFields_1.forEach(function (value, name) {\n incoming[name] = value;\n });\n }\n }\n if (mergeTree.info) {\n return this.cache.policies.runMergeFunction(existing, incoming, mergeTree.info, context, getStorageArgs && (_a = context.store).getStorage.apply(_a, getStorageArgs));\n }\n return incoming;\n };\n return StoreWriter;\n}());\nexport { StoreWriter };\nvar emptyMergeTreePool = [];\nfunction getChildMergeTree(_a, name) {\n var map = _a.map;\n if (!map.has(name)) {\n map.set(name, emptyMergeTreePool.pop() || { map: new Map() });\n }\n return map.get(name);\n}\nfunction mergeMergeTrees(left, right) {\n if (left === right || !right || mergeTreeIsEmpty(right))\n return left;\n if (!left || mergeTreeIsEmpty(left))\n return right;\n var info = left.info && right.info ? __assign(__assign({}, left.info), right.info) : left.info || right.info;\n var needToMergeMaps = left.map.size && right.map.size;\n var map = needToMergeMaps ? new Map()\n : left.map.size ? left.map\n : right.map;\n var merged = { info: info, map: map };\n if (needToMergeMaps) {\n var remainingRightKeys_1 = new Set(right.map.keys());\n left.map.forEach(function (leftTree, key) {\n merged.map.set(key, mergeMergeTrees(leftTree, right.map.get(key)));\n remainingRightKeys_1.delete(key);\n });\n remainingRightKeys_1.forEach(function (key) {\n merged.map.set(key, mergeMergeTrees(right.map.get(key), left.map.get(key)));\n });\n }\n return merged;\n}\nfunction mergeTreeIsEmpty(tree) {\n return !tree || !(tree.info || tree.map.size);\n}\nfunction maybeRecycleChildMergeTree(_a, name) {\n var map = _a.map;\n var childTree = map.get(name);\n if (childTree && mergeTreeIsEmpty(childTree)) {\n emptyMergeTreePool.push(childTree);\n map.delete(name);\n }\n}\nvar warnings = new Set();\n// Note that this function is unused in production, and thus should be\n// pruned by any well-configured minifier.\nfunction warnAboutDataLoss(existingRef, incomingObj, storeFieldName, store) {\n var getChild = function (objOrRef) {\n var child = store.getFieldValue(objOrRef, storeFieldName);\n return typeof child === \"object\" && child;\n };\n var existing = getChild(existingRef);\n if (!existing)\n return;\n var incoming = getChild(incomingObj);\n if (!incoming)\n return;\n // It's always safe to replace a reference, since it refers to data\n // safely stored elsewhere.\n if (isReference(existing))\n return;\n // If the values are structurally equivalent, we do not need to worry\n // about incoming replacing existing.\n if (equal(existing, incoming))\n return;\n // If we're replacing every key of the existing object, then the\n // existing data would be overwritten even if the objects were\n // normalized, so warning would not be helpful here.\n if (Object.keys(existing).every(function (key) { return store.getFieldValue(incoming, key) !== void 0; })) {\n return;\n }\n var parentType = store.getFieldValue(existingRef, \"__typename\") ||\n store.getFieldValue(incomingObj, \"__typename\");\n var fieldName = fieldNameFromStoreName(storeFieldName);\n var typeDotName = \"\".concat(parentType, \".\").concat(fieldName);\n // Avoid warning more than once for the same type and field name.\n if (warnings.has(typeDotName))\n return;\n warnings.add(typeDotName);\n var childTypenames = [];\n // Arrays do not have __typename fields, and always need a custom merge\n // function, even if their elements are normalized entities.\n if (!isArray(existing) && !isArray(incoming)) {\n [existing, incoming].forEach(function (child) {\n var typename = store.getFieldValue(child, \"__typename\");\n if (typeof typename === \"string\" && !childTypenames.includes(typename)) {\n childTypenames.push(typename);\n }\n });\n }\n globalThis.__DEV__ !== false && invariant.warn(14, fieldName, parentType, childTypenames.length ?\n \"either ensure all objects of type \" +\n childTypenames.join(\" and \") +\n \" have an ID or a custom merge function, or \"\n : \"\", typeDotName, __assign({}, existing), __assign({}, incoming));\n}\n//# sourceMappingURL=writeToStore.js.map","import { __assign, __extends } from \"tslib\";\nimport { invariant } from \"../../utilities/globals/index.js\";\n// Make builtins like Map and Set safe to use with non-extensible objects.\nimport \"./fixPolyfills.js\";\nimport { wrap } from \"optimism\";\nimport { equal } from \"@wry/equality\";\nimport { ApolloCache } from \"../core/cache.js\";\nimport { MissingFieldError } from \"../core/types/common.js\";\nimport { addTypenameToDocument, isReference, DocumentTransform, canonicalStringify, print, cacheSizes, } from \"../../utilities/index.js\";\nimport { StoreReader } from \"./readFromStore.js\";\nimport { StoreWriter } from \"./writeToStore.js\";\nimport { EntityStore, supportsResultCaching } from \"./entityStore.js\";\nimport { makeVar, forgetCache, recallCache } from \"./reactiveVars.js\";\nimport { Policies } from \"./policies.js\";\nimport { hasOwn, normalizeConfig, shouldCanonizeResults } from \"./helpers.js\";\nimport { getInMemoryCacheMemoryInternals } from \"../../utilities/caching/getMemoryInternals.js\";\nvar InMemoryCache = /** @class */ (function (_super) {\n __extends(InMemoryCache, _super);\n function InMemoryCache(config) {\n if (config === void 0) { config = {}; }\n var _this = _super.call(this) || this;\n _this.watches = new Set();\n _this.addTypenameTransform = new DocumentTransform(addTypenameToDocument);\n // Override the default value, since InMemoryCache result objects are frozen\n // in development and expected to remain logically immutable in production.\n _this.assumeImmutableResults = true;\n _this.makeVar = makeVar;\n _this.txCount = 0;\n _this.config = normalizeConfig(config);\n _this.addTypename = !!_this.config.addTypename;\n _this.policies = new Policies({\n cache: _this,\n dataIdFromObject: _this.config.dataIdFromObject,\n possibleTypes: _this.config.possibleTypes,\n typePolicies: _this.config.typePolicies,\n });\n _this.init();\n return _this;\n }\n InMemoryCache.prototype.init = function () {\n // Passing { resultCaching: false } in the InMemoryCache constructor options\n // will completely disable dependency tracking, which will improve memory\n // usage but worsen the performance of repeated reads.\n var rootStore = (this.data = new EntityStore.Root({\n policies: this.policies,\n resultCaching: this.config.resultCaching,\n }));\n // When no optimistic writes are currently active, cache.optimisticData ===\n // cache.data, so there are no additional layers on top of the actual data.\n // When an optimistic update happens, this.optimisticData will become a\n // linked list of EntityStore Layer objects that terminates with the\n // original this.data cache object.\n this.optimisticData = rootStore.stump;\n this.resetResultCache();\n };\n InMemoryCache.prototype.resetResultCache = function (resetResultIdentities) {\n var _this = this;\n var previousReader = this.storeReader;\n var fragments = this.config.fragments;\n // The StoreWriter is mostly stateless and so doesn't really need to be\n // reset, but it does need to have its writer.storeReader reference updated,\n // so it's simpler to update this.storeWriter as well.\n this.storeWriter = new StoreWriter(this, (this.storeReader = new StoreReader({\n cache: this,\n addTypename: this.addTypename,\n resultCacheMaxSize: this.config.resultCacheMaxSize,\n canonizeResults: shouldCanonizeResults(this.config),\n canon: resetResultIdentities ? void 0 : (previousReader && previousReader.canon),\n fragments: fragments,\n })), fragments);\n this.maybeBroadcastWatch = wrap(function (c, options) {\n return _this.broadcastWatch(c, options);\n }, {\n max: this.config.resultCacheMaxSize ||\n cacheSizes[\"inMemoryCache.maybeBroadcastWatch\"] ||\n 5000 /* defaultCacheSizes[\"inMemoryCache.maybeBroadcastWatch\"] */,\n makeCacheKey: function (c) {\n // Return a cache key (thus enabling result caching) only if we're\n // currently using a data store that can track cache dependencies.\n var store = c.optimistic ? _this.optimisticData : _this.data;\n if (supportsResultCaching(store)) {\n var optimistic = c.optimistic, id = c.id, variables = c.variables;\n return store.makeCacheKey(c.query, \n // Different watches can have the same query, optimistic\n // status, rootId, and variables, but if their callbacks are\n // different, the (identical) result needs to be delivered to\n // each distinct callback. The easiest way to achieve that\n // separation is to include c.callback in the cache key for\n // maybeBroadcastWatch calls. See issue #5733.\n c.callback, canonicalStringify({ optimistic: optimistic, id: id, variables: variables }));\n }\n },\n });\n // Since we have thrown away all the cached functions that depend on the\n // CacheGroup dependencies maintained by EntityStore, we should also reset\n // all CacheGroup dependency information.\n new Set([this.data.group, this.optimisticData.group]).forEach(function (group) {\n return group.resetCaching();\n });\n };\n InMemoryCache.prototype.restore = function (data) {\n this.init();\n // Since calling this.init() discards/replaces the entire StoreReader, along\n // with the result caches it maintains, this.data.replace(data) won't have\n // to bother deleting the old data.\n if (data)\n this.data.replace(data);\n return this;\n };\n InMemoryCache.prototype.extract = function (optimistic) {\n if (optimistic === void 0) { optimistic = false; }\n return (optimistic ? this.optimisticData : this.data).extract();\n };\n InMemoryCache.prototype.read = function (options) {\n var \n // Since read returns data or null, without any additional metadata\n // about whether/where there might have been missing fields, the\n // default behavior cannot be returnPartialData = true (like it is\n // for the diff method), since defaulting to true would violate the\n // integrity of the T in the return type. However, partial data may\n // be useful in some cases, so returnPartialData:true may be\n // specified explicitly.\n _a = options.returnPartialData, \n // Since read returns data or null, without any additional metadata\n // about whether/where there might have been missing fields, the\n // default behavior cannot be returnPartialData = true (like it is\n // for the diff method), since defaulting to true would violate the\n // integrity of the T in the return type. However, partial data may\n // be useful in some cases, so returnPartialData:true may be\n // specified explicitly.\n returnPartialData = _a === void 0 ? false : _a;\n try {\n return (this.storeReader.diffQueryAgainstStore(__assign(__assign({}, options), { store: options.optimistic ? this.optimisticData : this.data, config: this.config, returnPartialData: returnPartialData })).result || null);\n }\n catch (e) {\n if (e instanceof MissingFieldError) {\n // Swallow MissingFieldError and return null, so callers do not need to\n // worry about catching \"normal\" exceptions resulting from incomplete\n // cache data. Unexpected errors will be re-thrown. If you need more\n // information about which fields were missing, use cache.diff instead,\n // and examine diffResult.missing.\n return null;\n }\n throw e;\n }\n };\n InMemoryCache.prototype.write = function (options) {\n try {\n ++this.txCount;\n return this.storeWriter.writeToStore(this.data, options);\n }\n finally {\n if (!--this.txCount && options.broadcast !== false) {\n this.broadcastWatches();\n }\n }\n };\n InMemoryCache.prototype.modify = function (options) {\n if (hasOwn.call(options, \"id\") && !options.id) {\n // To my knowledge, TypeScript does not currently provide a way to\n // enforce that an optional property?:type must *not* be undefined\n // when present. That ability would be useful here, because we want\n // options.id to default to ROOT_QUERY only when no options.id was\n // provided. If the caller attempts to pass options.id with a\n // falsy/undefined value (perhaps because cache.identify failed), we\n // should not assume the goal was to modify the ROOT_QUERY object.\n // We could throw, but it seems natural to return false to indicate\n // that nothing was modified.\n return false;\n }\n var store = ((options.optimistic) // Defaults to false.\n ) ?\n this.optimisticData\n : this.data;\n try {\n ++this.txCount;\n return store.modify(options.id || \"ROOT_QUERY\", options.fields);\n }\n finally {\n if (!--this.txCount && options.broadcast !== false) {\n this.broadcastWatches();\n }\n }\n };\n InMemoryCache.prototype.diff = function (options) {\n return this.storeReader.diffQueryAgainstStore(__assign(__assign({}, options), { store: options.optimistic ? this.optimisticData : this.data, rootId: options.id || \"ROOT_QUERY\", config: this.config }));\n };\n InMemoryCache.prototype.watch = function (watch) {\n var _this = this;\n if (!this.watches.size) {\n // In case we previously called forgetCache(this) because\n // this.watches became empty (see below), reattach this cache to any\n // reactive variables on which it previously depended. It might seem\n // paradoxical that we're able to recall something we supposedly\n // forgot, but the point of calling forgetCache(this) is to silence\n // useless broadcasts while this.watches is empty, and to allow the\n // cache to be garbage collected. If, however, we manage to call\n // recallCache(this) here, this cache object must not have been\n // garbage collected yet, and should resume receiving updates from\n // reactive variables, now that it has a watcher to notify.\n recallCache(this);\n }\n this.watches.add(watch);\n if (watch.immediate) {\n this.maybeBroadcastWatch(watch);\n }\n return function () {\n // Once we remove the last watch from this.watches, cache.broadcastWatches\n // no longer does anything, so we preemptively tell the reactive variable\n // system to exclude this cache from future broadcasts.\n if (_this.watches.delete(watch) && !_this.watches.size) {\n forgetCache(_this);\n }\n // Remove this watch from the LRU cache managed by the\n // maybeBroadcastWatch OptimisticWrapperFunction, to prevent memory\n // leaks involving the closure of watch.callback.\n _this.maybeBroadcastWatch.forget(watch);\n };\n };\n InMemoryCache.prototype.gc = function (options) {\n var _a;\n canonicalStringify.reset();\n print.reset();\n this.addTypenameTransform.resetCache();\n (_a = this.config.fragments) === null || _a === void 0 ? void 0 : _a.resetCaches();\n var ids = this.optimisticData.gc();\n if (options && !this.txCount) {\n if (options.resetResultCache) {\n this.resetResultCache(options.resetResultIdentities);\n }\n else if (options.resetResultIdentities) {\n this.storeReader.resetCanon();\n }\n }\n return ids;\n };\n // Call this method to ensure the given root ID remains in the cache after\n // garbage collection, along with its transitive child entities. Note that\n // the cache automatically retains all directly written entities. By default,\n // the retainment persists after optimistic updates are removed. Pass true\n // for the optimistic argument if you would prefer for the retainment to be\n // discarded when the top-most optimistic layer is removed. Returns the\n // resulting (non-negative) retainment count.\n InMemoryCache.prototype.retain = function (rootId, optimistic) {\n return (optimistic ? this.optimisticData : this.data).retain(rootId);\n };\n // Call this method to undo the effect of the retain method, above. Once the\n // retainment count falls to zero, the given ID will no longer be preserved\n // during garbage collection, though it may still be preserved by other safe\n // entities that refer to it. Returns the resulting (non-negative) retainment\n // count, in case that's useful.\n InMemoryCache.prototype.release = function (rootId, optimistic) {\n return (optimistic ? this.optimisticData : this.data).release(rootId);\n };\n // Returns the canonical ID for a given StoreObject, obeying typePolicies\n // and keyFields (and dataIdFromObject, if you still use that). At minimum,\n // the object must contain a __typename and any primary key fields required\n // to identify entities of that type. If you pass a query result object, be\n // sure that none of the primary key fields have been renamed by aliasing.\n // If you pass a Reference object, its __ref ID string will be returned.\n InMemoryCache.prototype.identify = function (object) {\n if (isReference(object))\n return object.__ref;\n try {\n return this.policies.identify(object)[0];\n }\n catch (e) {\n globalThis.__DEV__ !== false && invariant.warn(e);\n }\n };\n InMemoryCache.prototype.evict = function (options) {\n if (!options.id) {\n if (hasOwn.call(options, \"id\")) {\n // See comment in modify method about why we return false when\n // options.id exists but is falsy/undefined.\n return false;\n }\n options = __assign(__assign({}, options), { id: \"ROOT_QUERY\" });\n }\n try {\n // It's unlikely that the eviction will end up invoking any other\n // cache update operations while it's running, but {in,de}crementing\n // this.txCount still seems like a good idea, for uniformity with\n // the other update methods.\n ++this.txCount;\n // Pass this.data as a limit on the depth of the eviction, so evictions\n // during optimistic updates (when this.data is temporarily set equal to\n // this.optimisticData) do not escape their optimistic Layer.\n return this.optimisticData.evict(options, this.data);\n }\n finally {\n if (!--this.txCount && options.broadcast !== false) {\n this.broadcastWatches();\n }\n }\n };\n InMemoryCache.prototype.reset = function (options) {\n var _this = this;\n this.init();\n canonicalStringify.reset();\n if (options && options.discardWatches) {\n // Similar to what happens in the unsubscribe function returned by\n // cache.watch, applied to all current watches.\n this.watches.forEach(function (watch) { return _this.maybeBroadcastWatch.forget(watch); });\n this.watches.clear();\n forgetCache(this);\n }\n else {\n // Calling this.init() above unblocks all maybeBroadcastWatch caching, so\n // this.broadcastWatches() triggers a broadcast to every current watcher\n // (letting them know their data is now missing). This default behavior is\n // convenient because it means the watches do not have to be manually\n // reestablished after resetting the cache. To prevent this broadcast and\n // cancel all watches, pass true for options.discardWatches.\n this.broadcastWatches();\n }\n return Promise.resolve();\n };\n InMemoryCache.prototype.removeOptimistic = function (idToRemove) {\n var newOptimisticData = this.optimisticData.removeLayer(idToRemove);\n if (newOptimisticData !== this.optimisticData) {\n this.optimisticData = newOptimisticData;\n this.broadcastWatches();\n }\n };\n InMemoryCache.prototype.batch = function (options) {\n var _this = this;\n var update = options.update, _a = options.optimistic, optimistic = _a === void 0 ? true : _a, removeOptimistic = options.removeOptimistic, onWatchUpdated = options.onWatchUpdated;\n var updateResult;\n var perform = function (layer) {\n var _a = _this, data = _a.data, optimisticData = _a.optimisticData;\n ++_this.txCount;\n if (layer) {\n _this.data = _this.optimisticData = layer;\n }\n try {\n return (updateResult = update(_this));\n }\n finally {\n --_this.txCount;\n _this.data = data;\n _this.optimisticData = optimisticData;\n }\n };\n var alreadyDirty = new Set();\n if (onWatchUpdated && !this.txCount) {\n // If an options.onWatchUpdated callback is provided, we want to call it\n // with only the Cache.WatchOptions objects affected by options.update,\n // but there might be dirty watchers already waiting to be broadcast that\n // have nothing to do with the update. To prevent including those watchers\n // in the post-update broadcast, we perform this initial broadcast to\n // collect the dirty watchers, so we can re-dirty them later, after the\n // post-update broadcast, allowing them to receive their pending\n // broadcasts the next time broadcastWatches is called, just as they would\n // if we never called cache.batch.\n this.broadcastWatches(__assign(__assign({}, options), { onWatchUpdated: function (watch) {\n alreadyDirty.add(watch);\n return false;\n } }));\n }\n if (typeof optimistic === \"string\") {\n // Note that there can be multiple layers with the same optimistic ID.\n // When removeOptimistic(id) is called for that id, all matching layers\n // will be removed, and the remaining layers will be reapplied.\n this.optimisticData = this.optimisticData.addLayer(optimistic, perform);\n }\n else if (optimistic === false) {\n // Ensure both this.data and this.optimisticData refer to the root\n // (non-optimistic) layer of the cache during the update. Note that\n // this.data could be a Layer if we are currently executing an optimistic\n // update function, but otherwise will always be an EntityStore.Root\n // instance.\n perform(this.data);\n }\n else {\n // Otherwise, leave this.data and this.optimisticData unchanged and run\n // the update with broadcast batching.\n perform();\n }\n if (typeof removeOptimistic === \"string\") {\n this.optimisticData = this.optimisticData.removeLayer(removeOptimistic);\n }\n // Note: if this.txCount > 0, then alreadyDirty.size === 0, so this code\n // takes the else branch and calls this.broadcastWatches(options), which\n // does nothing when this.txCount > 0.\n if (onWatchUpdated && alreadyDirty.size) {\n this.broadcastWatches(__assign(__assign({}, options), { onWatchUpdated: function (watch, diff) {\n var result = onWatchUpdated.call(this, watch, diff);\n if (result !== false) {\n // Since onWatchUpdated did not return false, this diff is\n // about to be broadcast to watch.callback, so we don't need\n // to re-dirty it with the other alreadyDirty watches below.\n alreadyDirty.delete(watch);\n }\n return result;\n } }));\n // Silently re-dirty any watches that were already dirty before the update\n // was performed, and were not broadcast just now.\n if (alreadyDirty.size) {\n alreadyDirty.forEach(function (watch) { return _this.maybeBroadcastWatch.dirty(watch); });\n }\n }\n else {\n // If alreadyDirty is empty or we don't have an onWatchUpdated\n // function, we don't need to go to the trouble of wrapping\n // options.onWatchUpdated.\n this.broadcastWatches(options);\n }\n return updateResult;\n };\n InMemoryCache.prototype.performTransaction = function (update, optimisticId) {\n return this.batch({\n update: update,\n optimistic: optimisticId || optimisticId !== null,\n });\n };\n InMemoryCache.prototype.transformDocument = function (document) {\n return this.addTypenameToDocument(this.addFragmentsToDocument(document));\n };\n InMemoryCache.prototype.broadcastWatches = function (options) {\n var _this = this;\n if (!this.txCount) {\n this.watches.forEach(function (c) { return _this.maybeBroadcastWatch(c, options); });\n }\n };\n InMemoryCache.prototype.addFragmentsToDocument = function (document) {\n var fragments = this.config.fragments;\n return fragments ? fragments.transform(document) : document;\n };\n InMemoryCache.prototype.addTypenameToDocument = function (document) {\n if (this.addTypename) {\n return this.addTypenameTransform.transformDocument(document);\n }\n return document;\n };\n // This method is wrapped by maybeBroadcastWatch, which is called by\n // broadcastWatches, so that we compute and broadcast results only when\n // the data that would be broadcast might have changed. It would be\n // simpler to check for changes after recomputing a result but before\n // broadcasting it, but this wrapping approach allows us to skip both\n // the recomputation and the broadcast, in most cases.\n InMemoryCache.prototype.broadcastWatch = function (c, options) {\n var lastDiff = c.lastDiff;\n // Both WatchOptions and DiffOptions extend ReadOptions, and DiffOptions\n // currently requires no additional properties, so we can use c (a\n // WatchOptions object) as DiffOptions, without having to allocate a new\n // object, and without having to enumerate the relevant properties (query,\n // variables, etc.) explicitly. There will be some additional properties\n // (lastDiff, callback, etc.), but cache.diff ignores them.\n var diff = this.diff(c);\n if (options) {\n if (c.optimistic && typeof options.optimistic === \"string\") {\n diff.fromOptimisticTransaction = true;\n }\n if (options.onWatchUpdated &&\n options.onWatchUpdated.call(this, c, diff, lastDiff) === false) {\n // Returning false from the onWatchUpdated callback will prevent\n // calling c.callback(diff) for this watcher.\n return;\n }\n }\n if (!lastDiff || !equal(lastDiff.result, diff.result)) {\n c.callback((c.lastDiff = diff), lastDiff);\n }\n };\n return InMemoryCache;\n}(ApolloCache));\nexport { InMemoryCache };\nif (globalThis.__DEV__ !== false) {\n InMemoryCache.prototype.getMemoryInternals = getInMemoryCacheMemoryInternals;\n}\n//# sourceMappingURL=inMemoryCache.js.map","import React from 'react';\nimport client from './src/client';\nimport fetch from 'isomorphic-fetch';\nimport {ApolloClient, ApolloProvider, InMemoryCache} from '@apollo/client';\n\nexport const wrapRootElement = ({element}, options) => (\n \n {element}\n \n);\n","\"use strict\";\n\nexports.onRouteUpdate = function (_ref, pluginOptions) {\n var location = _ref.location;\n if (pluginOptions === void 0) {\n pluginOptions = {};\n }\n if (process.env.NODE_ENV !== \"production\" || typeof gtag !== \"function\") {\n return null;\n }\n var pluginConfig = pluginOptions.pluginConfig || {};\n var pathIsExcluded = location && typeof window.excludeGtagPaths !== \"undefined\" && window.excludeGtagPaths.some(function (rx) {\n return rx.test(location.pathname);\n });\n if (pathIsExcluded) return null;\n\n // wrap inside a timeout to make sure react-helmet is done with its changes (https://github.com/gatsbyjs/gatsby/issues/11592)\n var sendPageView = function sendPageView() {\n var pagePath = location ? location.pathname + location.search + location.hash : undefined;\n window.gtag(\"event\", \"page_view\", {\n page_path: pagePath\n });\n };\n var _pluginConfig$delayOn = pluginConfig.delayOnRouteUpdate,\n delayOnRouteUpdate = _pluginConfig$delayOn === void 0 ? 0 : _pluginConfig$delayOn;\n if (\"requestAnimationFrame\" in window) {\n requestAnimationFrame(function () {\n requestAnimationFrame(function () {\n return setTimeout(sendPageView, delayOnRouteUpdate);\n });\n });\n } else {\n // Delay by 32ms to simulate 2 requestOnAnimationFrame calls\n setTimeout(sendPageView, 32 + delayOnRouteUpdate);\n }\n return null;\n};","/* global __MANIFEST_PLUGIN_HAS_LOCALISATION__ */\nimport { withPrefix } from \"gatsby\";\nimport getManifestForPathname from \"./get-manifest-pathname\";\n\n// when we don't have localisation in our manifest, we tree shake everything away\nexport const onRouteUpdate = function onRouteUpdate({\n location\n}, pluginOptions) {\n if (__MANIFEST_PLUGIN_HAS_LOCALISATION__) {\n const {\n localize\n } = pluginOptions;\n const manifestFilename = getManifestForPathname(location.pathname, localize, true);\n const manifestEl = document.head.querySelector(`link[rel=\"manifest\"]`);\n if (manifestEl) {\n manifestEl.setAttribute(`href`, withPrefix(manifestFilename));\n }\n }\n};","\"use strict\";\n\nexports.__esModule = true;\nexports.default = void 0;\nvar _gatsby = require(\"gatsby\");\n/**\n * Get a manifest filename depending on localized pathname\n *\n * @param {string} pathname\n * @param {Array<{start_url: string, lang: string}>} localizedManifests\n * @param {boolean} shouldPrependPathPrefix\n * @return string\n */\nvar _default = (pathname, localizedManifests, shouldPrependPathPrefix = false) => {\n const defaultFilename = `manifest.webmanifest`;\n if (!Array.isArray(localizedManifests)) {\n return defaultFilename;\n }\n const localizedManifest = localizedManifests.find(app => {\n let startUrl = app.start_url;\n if (shouldPrependPathPrefix) {\n startUrl = (0, _gatsby.withPrefix)(startUrl);\n }\n return pathname.startsWith(startUrl);\n });\n if (!localizedManifest) {\n return defaultFilename;\n }\n return `manifest_${localizedManifest.lang}.webmanifest`;\n};\nexports.default = _default;","/**\n * Copyright (c) 2013-present, Facebook, Inc.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\n'use strict';\n\n/**\n * Use invariant() to assert state which your program assumes to be true.\n *\n * Provide sprintf-style format (only %s is supported) and arguments\n * to provide information about what broke and what you were\n * expecting.\n *\n * The invariant message will be stripped in production, but the invariant\n * will remain to ensure logic does not differ in production.\n */\n\nvar invariant = function(condition, format, a, b, c, d, e, f) {\n if (process.env.NODE_ENV !== 'production') {\n if (format === undefined) {\n throw new Error('invariant requires an error message argument');\n }\n }\n\n if (!condition) {\n var error;\n if (format === undefined) {\n error = new Error(\n 'Minified exception occurred; use the non-minified dev environment ' +\n 'for the full error message and additional helpful warnings.'\n );\n } else {\n var args = [a, b, c, d, e, f];\n var argIndex = 0;\n error = new Error(\n format.replace(/%s/g, function() { return args[argIndex++]; })\n );\n error.name = 'Invariant Violation';\n }\n\n error.framesToPop = 1; // we don't care about invariant's own frame\n throw error;\n }\n};\n\nmodule.exports = invariant;\n","/**\n * @license React\n * react-server-dom-webpack.production.min.js\n *\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n'use strict';var k=require(\"react\"),l={stream:!0},n=new Map,p=Symbol.for(\"react.element\"),q=Symbol.for(\"react.lazy\"),r=Symbol.for(\"react.default_value\"),t=k.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.ContextRegistry;function u(a){t[a]||(t[a]=k.createServerContext(a,r));return t[a]}function v(a,b,c){this._status=a;this._value=b;this._response=c}v.prototype.then=function(a){0===this._status?(null===this._value&&(this._value=[]),this._value.push(a)):a()};\nfunction w(a){switch(a._status){case 3:return a._value;case 1:var b=JSON.parse(a._value,a._response._fromJSON);a._status=3;return a._value=b;case 2:b=a._value;for(var c=b.chunks,d=0;d {\n const { forward = [], ...filteredConfig } = config || {};\n const configStr = JSON.stringify(filteredConfig, (k, v) => {\n if (typeof v === 'function') {\n v = String(v);\n if (v.startsWith(k + '(')) {\n v = 'function ' + v;\n }\n }\n return v;\n });\n return [\n `!(function(w,p,f,c){`,\n Object.keys(filteredConfig).length > 0\n ? `c=w[p]=Object.assign(w[p]||{},${configStr});`\n : `c=w[p]=w[p]||{};`,\n `c[f]=(c[f]||[])`,\n forward.length > 0 ? `.concat(${JSON.stringify(forward)})` : ``,\n `})(window,'partytown','forward');`,\n snippetCode,\n ].join('');\n};\n\n/**\n * The `type` attribute for Partytown scripts, which does two things:\n *\n * 1. Prevents the `