feat(nuxt): custom cache support for data fetching composables (#20747)

Authored by Dario Ferderber on 2023-10-16 21:54:39 +02:00, committed by GitHub
parent f4d67a9bcd
commit b52548d915
5 changed files with 34 additions and 17 deletions


@@ -31,6 +31,7 @@ type AsyncDataOptions<DataT> = {
transform?: (input: DataT) => DataT
pick?: string[]
watch?: WatchSource[]
getCachedData?: (key: string) => any
}
type AsyncData<DataT, ErrorT> = {
@@ -61,6 +62,7 @@ type AsyncDataRequestStatus = 'idle' | 'pending' | 'success' | 'error'
* _transform_: a function that can be used to alter the `handler` function result after resolving
* _pick_: only pick the specified keys in this array from the `handler` function result
* _watch_: watch reactive sources to auto-refresh
* _getCachedData_: a function that receives a cache key and can return cached data if it exists (by default it returns `nuxtApp.payload.data[key]` when hydrating and `nuxtApp.static.data[key]` after the app is hydrated). You can use this to build your own custom cache for `useAsyncData`, as shown in the sketch below.
* _deep_: return data in a deep ref object (it is `true` by default). It can be set to `false` to return data in a shallow ref object, which can improve performance if your data does not need to be deeply reactive.
Under the hood, `lazy: false` uses `<Suspense>` to block the loading of the route before the data has been fetched. Consider using `lazy: true` and implementing a loading state instead for a snappier user experience.
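
An editorial illustration of the new option (not part of the original diff): the sketch below keeps the default payload lookup but layers a hypothetical in-memory `Map` on top of it, so data fetched after hydration is reused by later calls. The `/api/users/...` endpoint, the `memoryCache` map and the `useCachedUser` composable name are invented for the example; only `getCachedData`, `nuxtApp.payload.data` and `nuxtApp.static.data` come from the feature itself.

```ts
// composables/useCachedUser.ts (illustrative sketch only)
import { useAsyncData, useNuxtApp } from '#app'

// Hypothetical module-level cache shared by every caller of this composable.
const memoryCache = new Map<string, unknown>()

export function useCachedUser (id: string) {
  const nuxtApp = useNuxtApp()
  const key = `user:${id}`

  return useAsyncData(key, () => $fetch(`/api/users/${id}`), {
    // Returning a value here skips the handler; returning null/undefined
    // lets useAsyncData fall through to a normal fetch.
    getCachedData (key) {
      return memoryCache.get(key)
        // Mirror the built-in default so hydration still reuses the server payload.
        ?? (nuxtApp.isHydrating ? nuxtApp.payload.data[key] : nuxtApp.static.data[key])
    },
    // Populating the custom cache from `transform` is an assumption of this
    // sketch; transform must still return the data it receives.
    transform (user) {
      memoryCache.set(key, user)
      return user
    }
  })
}
```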


@@ -26,6 +26,7 @@ type UseFetchOptions<DataT> = {
server?: boolean
lazy?: boolean
immediate?: boolean
getCachedData?: (key: string) => any
deep?: boolean
default?: () => DataT
transform?: (input: DataT) => DataT
@@ -73,6 +74,7 @@ All fetch options can be given a `computed` or `ref` value. These will be watche
* `transform`: a function that can be used to alter the `handler` function result after resolving
* `pick`: only pick the specified keys in this array from the `handler` function result
* `watch`: watch an array of reactive sources and auto-refresh the fetch result when they change. Fetch options and URL are watched by default. You can completely ignore reactive sources by using `watch: false`. Together with `immediate: false`, this allows for a fully-manual `useFetch`.
* `getCachedData`: a function that receives a cache key and can return cached data if it exists (by default it returns `nuxtApp.payload.data[key]` when hydrating and `nuxtApp.static.data[key]` after the app is hydrated). You can use this to build your own custom cache for `useFetch`, as shown in the sketch after this list.
* `deep`: return data in a deep ref object (it is `true` by default). It can be set to `false` to return data in a shallow ref object, which can improve performance if your data does not need to be deeply reactive.
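
A second editorial sketch, this time with `useFetch` inside `<script setup>`, where these composables are auto-imported (the explicit imports below are only for self-containedness). The `/api/products` endpoint is made up; the point is that consulting the payload unconditionally, rather than only while hydrating as the default does, lets client-side navigations reuse data that was already fetched.

```ts
// Illustrative only: reuse previously fetched data across client-side navigations.
import { useFetch, useNuxtApp } from '#app'

const nuxtApp = useNuxtApp()

// `/api/products` is a hypothetical endpoint.
const { data: products } = await useFetch('/api/products', {
  getCachedData (key) {
    // Same lookup as the new default, but applied unconditionally, so a value
    // already present in the payload short-circuits the request entirely.
    return nuxtApp.payload.data[key] ?? nuxtApp.static.data[key]
  }
})
```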
::alert{type=warning}


@@ -40,6 +40,7 @@ export interface AsyncDataOptions<
server?: boolean
lazy?: boolean
default?: () => DefaultT | Ref<DefaultT>
getCachedData?: (key: string) => DataT
transform?: _Transform<ResT, DataT>
pick?: PickKeys
watch?: MultiWatchSources
@@ -60,15 +61,14 @@ export interface AsyncDataExecuteOptions {
export interface _AsyncData<DataT, ErrorT> {
data: Ref<DataT>
pending: Ref<boolean>
refresh: (opts?: AsyncDataExecuteOptions) => Promise<void>
execute: (opts?: AsyncDataExecuteOptions) => Promise<void>
refresh: (opts?: AsyncDataExecuteOptions) => Promise<DataT>
execute: (opts?: AsyncDataExecuteOptions) => Promise<DataT>
error: Ref<ErrorT | null>
status: Ref<AsyncDataRequestStatus>
}
export type AsyncData<Data, Error> = _AsyncData<Data, Error> & Promise<_AsyncData<Data, Error>>
const getDefault = () => null
export function useAsyncData<
ResT,
DataE = Error,
@@ -132,20 +132,22 @@ export function useAsyncData<
throw new TypeError('[nuxt] [asyncData] handler must be a function.')
}
// Setup nuxt instance payload
const nuxt = useNuxtApp()
// Used to get default values
const getDefault = () => null
const getDefaultCachedData = () => nuxt.isHydrating ? nuxt.payload.data[key] : nuxt.static.data[key]
// Apply defaults
options.server = options.server ?? true
options.default = options.default ?? (getDefault as () => DefaultT)
options.getCachedData = options.getCachedData ?? getDefaultCachedData
options.lazy = options.lazy ?? false
options.immediate = options.immediate ?? true
// Setup nuxt instance payload
const nuxt = useNuxtApp()
const getCachedData = () => nuxt.isHydrating ? nuxt.payload.data[key] : nuxt.static.data[key]
const hasCachedData = () => ![null, undefined].includes(
nuxt.isHydrating ? nuxt.payload.data[key] : nuxt.static.data[key]
)
const hasCachedData = () => ![null, undefined].includes(options.getCachedData!(key) as any)
// Create or use a shared asyncData entity
if (!nuxt._asyncData[key] || !options.immediate) {
@@ -154,7 +156,7 @@
const _ref = options.deep !== true ? shallowRef : ref
nuxt._asyncData[key] = {
data: _ref(getCachedData() ?? options.default!()),
data: _ref(options.getCachedData!(key) ?? options.default!()),
pending: ref(!hasCachedData()),
error: toRef(nuxt.payload._errors, key),
status: ref('idle')
@@ -168,13 +170,13 @@ export function useAsyncData<
if (nuxt._asyncDataPromises[key]) {
if (opts.dedupe === false) {
// Avoid fetching same key more than once at a time
return nuxt._asyncDataPromises[key]
return nuxt._asyncDataPromises[key]!
}
(nuxt._asyncDataPromises[key] as any).cancelled = true
}
// Avoid fetching same key that is already fetched
if ((opts._initial || (nuxt.isHydrating && opts._initial !== false)) && hasCachedData()) {
return getCachedData()
return Promise.resolve(options.getCachedData!(key))
}
asyncData.pending.value = true
asyncData.status.value = 'pending'
@@ -222,7 +224,7 @@ export function useAsyncData<
delete nuxt._asyncDataPromises[key]
})
nuxt._asyncDataPromises[key] = promise
return nuxt._asyncDataPromises[key]
return nuxt._asyncDataPromises[key]!
}
const initialFetch = () => asyncData.refresh({ _initial: true })
@@ -235,7 +237,7 @@ export function useAsyncData<
if (getCurrentInstance()) {
onServerPrefetch(() => promise)
} else {
nuxt.hook('app:created', () => promise)
nuxt.hook('app:created', async () => { await promise })
}
}
@@ -270,9 +272,9 @@ export function useAsyncData<
if (options.watch) {
watch(options.watch, () => asyncData.refresh())
}
const off = nuxt.hook('app:data:refresh', (keys) => {
const off = nuxt.hook('app:data:refresh', async (keys) => {
if (!keys || keys.includes(key)) {
return asyncData.refresh()
await asyncData.refresh()
}
})
if (instance) {
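
An editorial note on the implementation above, with a hypothetical sketch: `hasCachedData()` treats `null` and `undefined` as "no cached data", so a custom `getCachedData` can force a refetch simply by returning `undefined`, which makes a time-to-live cache straightforward. The `ttlCache` map and `/api/stats` endpoint below are invented for the example.

```ts
// Hypothetical TTL cache; only the null/undefined contract comes from the diff above.
import { useAsyncData } from '#app'

interface CacheEntry<T> { value: T, expiresAt: number }
const ttlCache = new Map<string, CacheEntry<unknown>>()

const { data } = await useAsyncData('stats', () => $fetch('/api/stats'), {
  getCachedData (key) {
    const entry = ttlCache.get(key)
    // Returning undefined makes hasCachedData() false, so the handler runs again.
    return entry && entry.expiresAt > Date.now() ? entry.value : undefined
  }
})
// Writing fresh results into ttlCache after the handler resolves is omitted here.
```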


@@ -107,6 +107,7 @@ export function useFetch<
pick,
watch,
immediate,
getCachedData,
deep,
...fetchOptions
} = opts
@@ -123,6 +124,7 @@
transform,
pick,
immediate,
getCachedData,
deep,
watch: watch === false ? [] : [_fetchOptions, _request, ...(watch || [])]
}


@@ -165,6 +165,15 @@ describe('useAsyncData', () => {
await refreshNuxtData('key')
expect(data.data.value).toMatchInlineSnapshot('"test"')
})
it('allows custom access to a cache', async () => {
const { data } = await useAsyncData(() => ({ val: true }), { getCachedData: () => ({ val: false }) })
expect(data.value).toMatchInlineSnapshot(`
{
"val": false,
}
`)
})
})
describe('errors', () => {