Nuxt.js: Page level caching

Created on 3 Apr 2019 · 4 Comments · Source: nuxt/nuxt.js

What problem does this feature solve?

Reduce server side rendering time using page level caching.

What do the proposed changes look like?

We could use the component-level cache, but rendering is still unnecessarily slow.
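For reference, component-level caching in Nuxt 2 goes through vue-server-renderer's cache option, which can be passed via the render.bundleRenderer settings; a minimal sketch (the limits are illustrative):

// nuxt.config.js
import LRU from 'lru-cache'

export default {
  render: {
    bundleRenderer: {
      // handed straight to vue-server-renderer, which uses it to cache
      // rendered component markup
      cache: new LRU({
        max: 1000,
        maxAge: 1000 * 60 * 15
      })
    }
  }
}

Each cacheable component then needs a unique name plus a serverCacheKey function (e.g. serverCacheKey: props => props.item.id); components without one are rendered on every request, which is why this alone often isn't enough.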

It would be nice to have a page-level cache built into Nuxt. See the page-level caching section in the Vue SSR docs.
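For context, the approach in the Vue SSR guide is a "micro-cache" sitting in front of the render call. Roughly, assuming an Express-style server (app) and a vue-server-renderer bundle renderer (renderer) are already set up:

const LRU = require('lru-cache')

// cache whole rendered pages for one second ("micro-caching")
const microCache = new LRU({ max: 100, maxAge: 1000 })

// only cache pages that look the same for every user
const isCacheable = req => !req.headers.cookie

app.get('*', (req, res) => {
  const cacheable = isCacheable(req)

  if (cacheable && microCache.has(req.url)) {
    // cache hit: skip server-side rendering entirely
    return res.end(microCache.get(req.url))
  }

  renderer.renderToString({ url: req.url }, (err, html) => {
    if (err) {
      return res.status(500).end('Internal Server Error')
    }
    if (cacheable) {
      microCache.set(req.url, html)
    }
    res.end(html)
  })
})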

What would be nice is a function that accepts a route and returns a cache key if the route can be cached.
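Something along these lines, purely hypothetical (there is no pageCache option or getCacheKey hook in Nuxt today; this only sketches the shape of the proposal):

// nuxt.config.js — hypothetical shape of the proposed option
export default {
  render: {
    pageCache: {
      // hypothetical hook: return a cache key to cache the rendered page,
      // or a falsy value to skip caching for this route
      getCacheKey (route) {
        if (route.startsWith('/account')) return null // user-specific, never cache
        return route
      },
      maxAge: 10000
    }
  }
}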

This feature request is available on Nuxt community (#c8983)
feature-request


All 4 comments

Definitely a must for our project. Would love to see this.

+1, would be really nice to have this feature! My web application is rather slow now, and a built-in solution would be highly preferable over some random package.

Duplicate of #4698. Ideally, we would all be using NGINX, but @clarkdo suggested this module. Here's a stripped-down version of it, with simple queuing, that I wrote and use:

import LRU from 'lru-cache'

// The render result can contain Sets and functions, which plain JSON can't
// represent, so they are encoded as tagged ['set', ...] / ['func', ...] pairs.
// Functions are called once and their return value is stored.
const stringify = obj => JSON.stringify(obj, (k, v) =>
  typeof v === 'object' && v instanceof Set
    ? [ 'set', [ ...v ] ]
    : typeof v === 'function'
      ? [ 'func', v() ]
      : v
)

// Revive the tagged pairs back into a Set, or a function returning the
// captured value.
const parse = str => JSON.parse(str, (k, v) =>
  Array.isArray(v) && v.length === 2
    ? v[0] === 'set'
      ? new Set(v[1])
      : v[0] === 'func'
        ? () => v[1]
        : v
    : v
)

export default function ({ max = 100, maxAge = 3000 } = {}) {
  // Rendered pages, keyed by route and stored as serialised JSON strings
  const cache = new LRU({
    max,
    maxAge,
  })

  // Renders currently in flight, so concurrent requests for the same route
  // share a single render instead of each hitting the renderer
  const queue = new Map()

  const renderer = this.nuxt.renderer
  const renderRoute = renderer.renderRoute.bind(renderer)

  // Wrap the renderer: cached routes skip server-side rendering entirely
  renderer.renderRoute = function (route, context) {
    // 1. Fresh cache entry: return it immediately
    if (cache.has(route)) {
      const cached = cache.get(route)

      if (cached) {
        return Promise.resolve(parse(cached))
      }
    }

    // 2. Same route already rendering: reuse the pending promise
    if (queue.has(route)) {
      const queued = queue.get(route)

      if (queued) {
        return queued
      }
    }

    // 3. Otherwise render, cache successful results and clear the queue entry
    const promise = renderRoute(route, context)
      .then((result) => {
        if (!result.error) {
          cache.set(route, stringify(result))
        }

        return result
      })
      .finally(() => {
        queue.delete(route)
      })

    queue.set(route, promise)

    return promise
  }
}
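To try it out, one could save this as e.g. modules/page-cache.js (the path is just an example) and register it as a Nuxt module, passing whatever cache limits make sense:

// nuxt.config.js
export default {
  modules: [
    // path and option values are illustrative
    ['~/modules/page-cache.js', { max: 500, maxAge: 10000 }]
  ]
}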

It would be great to have this supported out of the box, though. Maybe we should leave it open as a feature request? /cc @manniL

