source: trip-planner-front/node_modules/make-fetch-happen/lib/cache/entry.js @ 76712b2

Last change on this file since 76712b2 was 6a3a178, checked in by Ema <ema_spirova@…>, 3 years ago

initial commit

  • Property mode set to 100644
File size: 16.4 KB
const { Request, Response } = require('minipass-fetch')
const Minipass = require('minipass')
const MinipassCollect = require('minipass-collect')
const MinipassFlush = require('minipass-flush')
const MinipassPipeline = require('minipass-pipeline')
const cacache = require('cacache')
const url = require('url')

const CachePolicy = require('./policy.js')
const cacheKey = require('./key.js')
const remote = require('../remote.js')

const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)

// maximum amount of data we will buffer into memory
// if we'll exceed this, we switch to streaming
const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB

// allow list for request headers that will be written to the cache index
// note: we will also store any request headers
// that are named in a response's vary header
const KEEP_REQUEST_HEADERS = [
  'accept-charset',
  'accept-encoding',
  'accept-language',
  'accept',
  'cache-control',
]

// allow list for response headers that will be written to the cache index
// note: we must not store the real response's age header, or when we load
// a cache policy based on the metadata it will think the cached response
// is always stale
const KEEP_RESPONSE_HEADERS = [
  'cache-control',
  'content-encoding',
  'content-language',
  'content-type',
  'date',
  'etag',
  'expires',
  'last-modified',
  'location',
  'pragma',
  'vary',
]

// return an object containing all metadata to be written to the index
const getMetadata = (request, response, options) => {
  const metadata = {
    time: Date.now(),
    url: request.url,
    reqHeaders: {},
    resHeaders: {},
  }

  // only save the status if it's not a 200 or 304
  if (response.status !== 200 && response.status !== 304)
    metadata.status = response.status

  for (const name of KEEP_REQUEST_HEADERS) {
    if (request.headers.has(name))
      metadata.reqHeaders[name] = request.headers.get(name)
  }

  // if the request's host header differs from the host in the url
  // we need to keep it, otherwise it's just noise and we ignore it
  const host = request.headers.get('host')
  const parsedUrl = new url.URL(request.url)
  if (host && parsedUrl.host !== host)
    metadata.reqHeaders.host = host

  // if the response has a vary header, make sure
  // we store the relevant request headers too
  if (response.headers.has('vary')) {
    const vary = response.headers.get('vary')
    // a vary of "*" means every header causes a different response.
    // in that scenario, we do not include any additional headers
    // as the freshness check will always fail anyway and we don't
    // want to bloat the cache indexes
    if (vary !== '*') {
      // copy any other request headers that will vary the response
      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
      for (const name of varyHeaders) {
        // explicitly ignore accept-encoding here
        if (name !== 'accept-encoding' && request.headers.has(name))
          metadata.reqHeaders[name] = request.headers.get(name)
      }
    }
  }

  for (const name of KEEP_RESPONSE_HEADERS) {
    if (response.headers.has(name))
      metadata.resHeaders[name] = response.headers.get(name)
  }

  // we only store accept-encoding and content-encoding if the user
  // has disabled automatic compression and decompression in minipass-fetch
  // since if it's enabled (the default) then the content will have
  // already been decompressed making the header a lie
  if (options.compress === false) {
    metadata.reqHeaders['accept-encoding'] = request.headers.get('accept-encoding')
    metadata.resHeaders['content-encoding'] = response.headers.get('content-encoding')
  }

  return metadata
}
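
// Illustrative sketch (hypothetical request and values, not taken from this
// file): for a GET of https://registry.example.org/pkg sent with
// accept: application/json, where the origin answers 200 with etag and
// cache-control headers, getMetadata would produce an index record shaped
// roughly like:
//
//   {
//     time: 1650000000000,
//     url: 'https://registry.example.org/pkg',
//     reqHeaders: { accept: 'application/json' },
//     resHeaders: {
//       'cache-control': 'public, max-age=300',
//       etag: '"abc123"',
//       date: 'Fri, 15 Apr 2022 06:40:00 GMT',
//     },
//   }
//
// status is omitted here because the response was a plain 200.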

// symbols used to hide objects that may be lazily evaluated in a getter
const _request = Symbol('request')
const _response = Symbol('response')
const _policy = Symbol('policy')

class CacheEntry {
  constructor ({ entry, request, response, options }) {
    if (entry) {
      this.key = entry.key
      this.entry = entry
      // previous versions of this module didn't write an explicit timestamp in
      // the metadata, so fall back to the entry's timestamp. we can't use the
      // entry timestamp to determine staleness because cacache will update it
      // when it verifies its data
      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
    } else
      this.key = cacheKey(request)

    this.options = options

    // these properties are behind getters that lazily evaluate
    this[_request] = request
    this[_response] = response
    this[_policy] = null
  }

  // returns a CacheEntry instance that satisfies the given request
  // or undefined if no existing entry satisfies
  static async find (request, options) {
    try {
      // compacts the index and returns an array of unique entries
      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
        const entryA = new CacheEntry({ entry: A, options })
        const entryB = new CacheEntry({ entry: B, options })
        return entryA.policy.satisfies(entryB.request)
      }, {
        validateEntry: (entry) => {
          // if an integrity is null, it needs to have a status specified
          if (entry.integrity === null)
            return !!(entry.metadata && entry.metadata.status)

          return true
        },
      })
    } catch (err) {
      // if the compact request fails, ignore the error and return
      return
    }

    // a cache mode of 'reload' means to behave as though we have no cache
    // on the way to the network. return undefined to allow cacheFetch to
    // create a brand new request no matter what.
    if (options.cache === 'reload')
      return

    // find the specific entry that satisfies the request
    let match
    for (const entry of matches) {
      const _entry = new CacheEntry({
        entry,
        options,
      })

      if (_entry.policy.satisfies(request)) {
        match = _entry
        break
      }
    }

    return match
  }
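
  // Illustrative sketch (hypothetical caller, not part of this file): a
  // cache-aware fetch would normally look an entry up before hitting the
  // network, e.g.
  //
  //   const entry = await CacheEntry.find(request, { cachePath: '/tmp/my-cache', cache: 'default' })
  //   if (entry)
  //     return entry.respond(request.method, options, 'hit')
  //
  // cachePath and the 'hit' status string are assumed values for the example.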

  // if the user made a PUT/POST/PATCH then we invalidate our
  // cache for the same url by deleting the index entirely
  static async invalidate (request, options) {
    const key = cacheKey(request)
    try {
      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
    } catch (err) {
      // ignore errors
    }
  }
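
  // Illustrative sketch (hypothetical caller): a write-through request such as
  //
  //   if (['PUT', 'POST', 'PATCH'].includes(request.method))
  //     await CacheEntry.invalidate(request, { cachePath })
  //
  // removes the index entries stored under this request's cache key outright.
  // cachePath is an assumed option name for the example.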

  get request () {
    if (!this[_request]) {
      this[_request] = new Request(this.entry.metadata.url, {
        method: 'GET',
        headers: this.entry.metadata.reqHeaders,
      })
    }

    return this[_request]
  }

  get response () {
    if (!this[_response]) {
      this[_response] = new Response(null, {
        url: this.entry.metadata.url,
        counter: this.options.counter,
        status: this.entry.metadata.status || 200,
        headers: {
          ...this.entry.metadata.resHeaders,
          'content-length': this.entry.size,
        },
      })
    }

    return this[_response]
  }

  get policy () {
    if (!this[_policy]) {
      this[_policy] = new CachePolicy({
        entry: this.entry,
        request: this.request,
        response: this.response,
        options: this.options,
      })
    }

    return this[_policy]
  }
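
  // Illustrative sketch: for a CacheEntry built from a bare index entry, the
  // getters above rebuild synthetic Request/Response objects from the stored
  // metadata on first access, so a check like
  //
  //   entry.policy.satisfies(incomingRequest)
  //
  // only consults the headers kept in the index and never touches the cached
  // body content.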

  // wraps the response in a pipeline that stores the data
  // in the cache while the user consumes it
  async store (status) {
    // if the request was not a GET, we got a status other than 200, 301, or
    // 308, or the CachePolicy forbids storage, set the cache status header
    // to 'skip' and return the response untouched
    if (this.request.method !== 'GET' || ![200, 301, 308].includes(this.response.status) || !this.policy.storable()) {
      this.response.headers.set('x-local-cache-status', 'skip')
      return this.response
    }

    const size = this.response.headers.get('content-length')
    const fitsInMemory = !!size && Number(size) < MAX_MEM_SIZE
    const shouldBuffer = this.options.memoize !== false && fitsInMemory
    const cacheOpts = {
      algorithms: this.options.algorithms,
      metadata: getMetadata(this.request, this.response, this.options),
      size,
      memoize: fitsInMemory && this.options.memoize,
    }

    let body = null
    // we only set a body if the status is a 200, redirects are
    // stored as metadata only
    if (this.response.status === 200) {
      let cacheWriteResolve, cacheWriteReject
      const cacheWritePromise = new Promise((resolve, reject) => {
        cacheWriteResolve = resolve
        cacheWriteReject = reject
      })

      body = new MinipassPipeline(new MinipassFlush({
        flush () {
          return cacheWritePromise
        },
      }))

      let abortStream, onResume
      if (shouldBuffer) {
        // if the result fits in memory, use a collect stream to gather
        // the response and write it to cacache while also passing it through
        // to the user
        onResume = () => {
          const collector = new MinipassCollect.PassThrough()
          abortStream = collector
          collector.on('collect', (data) => {
            // TODO if the cache write fails, log a warning but return the response anyway
            cacache.put(this.options.cachePath, this.key, data, cacheOpts).then(cacheWriteResolve, cacheWriteReject)
          })
          body.unshift(collector)
          body.unshift(this.response.body)
        }
      } else {
        // if it does not fit in memory, create a tee stream and use
        // that to pipe to both the cache and the user simultaneously
        onResume = () => {
          const tee = new Minipass()
          const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
          abortStream = cacheStream
          tee.pipe(cacheStream)
          // TODO if the cache write fails, log a warning but return the response anyway
          cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
          body.unshift(tee)
          body.unshift(this.response.body)
        }
      }

      body.once('resume', onResume)
      body.once('end', () => body.removeListener('resume', onResume))
      this.response.body.on('error', (err) => {
        // the abortStream will either be a MinipassCollect if we buffer
        // or a cacache write stream, either way be sure to listen for
        // errors from the actual response and avoid writing data that we
        // know to be invalid to the cache
        abortStream.destroy(err)
      })
    } else
      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)

    // note: we do not set the x-local-cache-hash header because we do not know
    // the hash value until after the write to the cache completes, which doesn't
    // happen until after the response has been sent and it's too late to write
    // the header anyway
    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
    this.response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream')
    this.response.headers.set('x-local-cache-status', status)
    this.response.headers.set('x-local-cache-time', new Date().toISOString())
    const newResponse = new Response(body, {
      url: this.response.url,
      status: this.response.status,
      headers: this.response.headers,
      counter: this.options.counter,
    })
    return newResponse
  }
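
  // Illustrative sketch (hypothetical caller): after a network fetch, the
  // fresh response is typically handed to store(), and the caller streams the
  // returned body to the user while the same bytes are written to cacache in
  // the background:
  //
  //   const entry = new CacheEntry({ request, response, options })
  //   const cachedResponse = await entry.store('miss')
  //   cachedResponse.body.pipe(destination) // cache write settles when the body flushes
  //
  // 'miss' and destination are assumed names for the example.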

  // use the cached data to create a response and return it
  async respond (method, options, status) {
    let response
    const size = Number(this.response.headers.get('content-length'))
    const fitsInMemory = !!size && size < MAX_MEM_SIZE
    const shouldBuffer = this.options.memoize !== false && fitsInMemory
    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
      // if the request is a HEAD, or the response is a redirect,
      // then the metadata in the entry already includes everything
      // we need to build a response
      response = this.response
    } else {
      // we're responding with a full cached response, so create a body
      // that reads from cacache and attach it to a new Response
      const body = new Minipass()
      const removeOnResume = () => body.removeListener('resume', onResume)
      let onResume
      if (shouldBuffer) {
        onResume = async () => {
          removeOnResume()
          try {
            const content = await cacache.get.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
            body.end(content)
          } catch (err) {
            if (err.code === 'EINTEGRITY')
              await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
            if (err.code === 'ENOENT' || err.code === 'EINTEGRITY')
              await CacheEntry.invalidate(this.request, this.options)
            body.emit('error', err)
          }
        }
      } else {
        onResume = () => {
          const cacheStream = cacache.get.stream.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
          cacheStream.on('error', async (err) => {
            cacheStream.pause()
            if (err.code === 'EINTEGRITY')
              await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
            if (err.code === 'ENOENT' || err.code === 'EINTEGRITY')
              await CacheEntry.invalidate(this.request, this.options)
            body.emit('error', err)
            cacheStream.resume()
          })
          cacheStream.pipe(body)
        }
      }

      body.once('resume', onResume)
      body.once('end', removeOnResume)
      response = new Response(body, {
        url: this.entry.metadata.url,
        counter: options.counter,
        status: 200,
        headers: {
          ...this.policy.responseHeaders(),
        },
      })
    }

    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
    response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream')
    response.headers.set('x-local-cache-status', status)
    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
    return response
  }
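
  // Illustrative sketch: for a fresh cache hit a caller could simply return
  //
  //   await entry.respond(request.method, options, 'hit')
  //
  // and inspect the x-local-cache-* headers on the result to see where the
  // body came from and whether it was buffered or streamed. The 'hit' status
  // string is an assumed value for the example.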

  // use the provided request along with this cache entry to
  // revalidate the stored response. returns a response, either
  // from the cache or from the update
  async revalidate (request, options) {
    const revalidateRequest = new Request(request, {
      headers: this.policy.revalidationHeaders(request),
    })

    try {
      // NOTE: be sure to remove the headers property from the
      // user supplied options, since we have already defined
      // them on the new request object. if they're still in the
      // options then those will overwrite the ones from the policy
      var response = await remote(revalidateRequest, {
        ...options,
        headers: undefined,
      })
    } catch (err) {
      // if the network fetch fails, return the stale
      // cached response unless it has a cache-control
      // of 'must-revalidate'
      if (!this.policy.mustRevalidate)
        return this.respond(request.method, options, 'stale')

      throw err
    }

    if (this.policy.revalidated(revalidateRequest, response)) {
      // we got a 304, write a new index to the cache and respond from cache
      const metadata = getMetadata(request, response, options)
      // 304 responses do not include headers that are specific to the response data
      // since they do not include a body, so we copy values for headers that were
      // in the old cache entry to the new one, if the new metadata does not already
      // include that header
      for (const name of KEEP_RESPONSE_HEADERS) {
        if (!hasOwnProperty(metadata.resHeaders, name) && hasOwnProperty(this.entry.metadata.resHeaders, name))
          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
      }

      try {
        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
          size: this.entry.size,
          metadata,
        })
      } catch (err) {
        // if updating the cache index fails, we ignore it and
        // respond anyway
      }
      return this.respond(request.method, options, 'revalidated')
    }

    // if we got a modified response, create a new entry based on it
    const newEntry = new CacheEntry({
      request,
      response,
      options,
    })

    // respond with the new entry while writing it to the cache
    return newEntry.store('updated')
  }
}

module.exports = CacheEntry
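
// Illustrative sketch (hypothetical consumer of this module; remote, stillFresh
// and the option names are assumptions for the example, not APIs defined here):
//
//   const CacheEntry = require('./entry.js')
//
//   async function cachedFetch (request, options) {
//     const entry = await CacheEntry.find(request, options)
//     if (!entry) {
//       // miss: fetch from the network and store the result while responding
//       const response = await remote(request, options)
//       return new CacheEntry({ request, response, options }).store('miss')
//     }
//     // hit: serve straight from cache while fresh, otherwise ask the origin
//     // whether the stored body can still be reused
//     return stillFresh(entry)
//       ? entry.respond(request.method, options, 'hit')
//       : entry.revalidate(request, options)
//   }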