|
|
|
import assert from 'assert'
|
|
|
|
import nock from 'nock'
|
|
|
|
import sinon from 'sinon'
|
|
|
|
import proxyquire from 'proxyquire'
|
|
|
|
|
|
|
|
const fakeStorage = {}
|
|
|
|
const fetchWithCache = proxyquire('./fetch-with-cache', {
|
|
|
|
'../../../lib/storage-helpers': fakeStorage,
|
|
|
|
}).default
|
|
|
|
|
|
|
|
describe('Fetch with cache', function () {
  beforeEach(function () {
    // Fresh stubs per test so recorded calls and behaviors don't leak
    // between tests.
    fakeStorage.getStorageItem = sinon.stub()
    fakeStorage.setStorageItem = sinon.stub()
  })

  afterEach(function () {
    sinon.restore()
    // Drop any unused/pending HTTP interceptors so they can't bleed into
    // later tests.
    nock.cleanAll()
  })

  it('fetches a url', async function () {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 1}')

    const response = await fetchWithCache(
      'https://fetchwithcache.metamask.io/price',
    )
    assert.deepStrictEqual(response, {
      average: 1,
    })
  })

  it('returns cached response', async function () {
    // The network would return 2, but the fresh cache entry (average: 1)
    // must be returned instead.
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 2}')

    fakeStorage.getStorageItem.returns({
      cachedResponse: { average: 1 },
      cachedTime: Date.now(),
    })

    const response = await fetchWithCache(
      'https://fetchwithcache.metamask.io/price',
    )
    assert.deepStrictEqual(response, {
      average: 1,
    })
  })

  it('fetches URL again after cache refresh time has passed', async function () {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 3}')

    // The cached entry is 1000ms old — older than the 123ms refresh window
    // passed below — so the network response (average: 3) should win.
    fakeStorage.getStorageItem.returns({
      cachedResponse: { average: 1 },
      cachedTime: Date.now() - 1000,
    })

    const response = await fetchWithCache(
      'https://fetchwithcache.metamask.io/price',
      {},
      { cacheRefreshTime: 123 },
    )
    assert.deepStrictEqual(response, {
      average: 3,
    })
  })

  it('should abort the request when the custom timeout is hit', async function () {
    // Delay the reply past the 20ms timeout so the abort path triggers.
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .delay(100)
      .reply(200, '{"average": 4}')

    await assert.rejects(
      () =>
        fetchWithCache(
          'https://fetchwithcache.metamask.io/price',
          {},
          { timeout: 20 },
        ),
      { name: 'AbortError', message: 'Aborted' },
    )
  })

  it('throws when the response is unsuccessful', async function () {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(500, '{"average": 6}')

    await assert.rejects(() =>
      fetchWithCache('https://fetchwithcache.metamask.io/price'),
    )
  })

  it('throws when a POST request is attempted', async function () {
    nock('https://fetchwithcache.metamask.io')
      .post('/price')
      .reply(200, '{"average": 7}')

    await assert.rejects(() =>
      fetchWithCache('https://fetchwithcache.metamask.io/price', {
        method: 'POST',
      }),
    )
  })

  it('throws when the request has a truthy body', async function () {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 8}')

    await assert.rejects(() =>
      fetchWithCache('https://fetchwithcache.metamask.io/price', { body: 1 }),
    )
  })

  it('throws when the request has an invalid Content-Type header', async function () {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 9}')

    await assert.rejects(
      () =>
        fetchWithCache('https://fetchwithcache.metamask.io/price', {
          headers: { 'Content-Type': 'text/plain' },
        }),
      { message: 'fetchWithCache only supports JSON responses' },
    )
  })

  it('should correctly cache responses from interwoven requests', async function () {
    nock('https://fetchwithcache.metamask.io')
      .get('/foo')
      .reply(200, '{"average": 9}')
    nock('https://fetchwithcache.metamask.io')
      .get('/bar')
      .reply(200, '{"average": 9}')

    // Back the stubs with a real in-memory store so both concurrent calls
    // read and write the same cache. Regression test for the data race that
    // was fixed by embedding the URL in the cache key (one response could
    // previously overwrite the other's cache entry).
    const testCache = {}
    fakeStorage.getStorageItem.callsFake((key) => testCache[key])
    fakeStorage.setStorageItem.callsFake((key, value) => {
      testCache[key] = value
    })

    await Promise.all([
      fetchWithCache(
        'https://fetchwithcache.metamask.io/foo',
        {},
        { cacheRefreshTime: 123 },
      ),
      fetchWithCache(
        'https://fetchwithcache.metamask.io/bar',
        {},
        { cacheRefreshTime: 123 },
      ),
    ])

    assert.deepStrictEqual(
      testCache['cachedFetch:https://fetchwithcache.metamask.io/foo']
        .cachedResponse,
      { average: 9 },
    )
    assert.deepStrictEqual(
      testCache['cachedFetch:https://fetchwithcache.metamask.io/bar']
        .cachedResponse,
      { average: 9 },
    )
  })
})