|
|
|
import nock from 'nock';
|
|
|
|
import sinon from 'sinon';
|
|
|
|
|
|
|
|
import { getStorageItem, setStorageItem } from './storage-helpers';
|
|
|
|
|
|
|
|
// Replace the async storage helpers with jest mocks so each test can control
// exactly what the cache layer "contains" without touching real storage.
jest.mock('./storage-helpers.js', () => ({
  getStorageItem: jest.fn(),
  setStorageItem: jest.fn(),
}));

// Loaded with `require` (not `import`) so the module is evaluated *after* the
// jest.mock factory above is registered; an ES import would be hoisted and is
// also fine under babel-jest, but require makes the ordering explicit.
const fetchWithCache = require('./fetch-with-cache').default;
|
|
|
|
|
|
|
|
describe('Fetch with cache', () => {
  afterEach(() => {
    // Reset sinon fakes and pending nock interceptors so one test's network
    // expectations never bleed into the next.
    sinon.restore();
    nock.cleanAll();
  });

  it('fetches a url', async () => {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 1}');

    const response = await fetchWithCache(
      'https://fetchwithcache.metamask.io/price',
    );
    // The JSON body is parsed before being returned to the caller.
    expect(response).toStrictEqual({
      average: 1,
    });
  });

  it('returns cached response', async () => {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 2}');

    // A fresh cache entry (cachedTime = now) must short-circuit the fetch,
    // so the network response ({average: 2}) should never be seen.
    getStorageItem.mockReturnValueOnce({
      cachedResponse: { average: 1 },
      cachedTime: Date.now(),
    });

    const response = await fetchWithCache(
      'https://fetchwithcache.metamask.io/price',
    );
    expect(response).toStrictEqual({
      average: 1,
    });
  });

  it('fetches URL again after cache refresh time has passed', async () => {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 3}');

    // Cache entry is 1000ms old, which exceeds the 123ms refresh window
    // below, so the stale entry must be ignored and the network hit again.
    getStorageItem.mockReturnValueOnce({
      cachedResponse: { average: 1 },
      cachedTime: Date.now() - 1000,
    });

    const response = await fetchWithCache(
      'https://fetchwithcache.metamask.io/price',
      {},
      { cacheRefreshTime: 123 },
    );
    expect(response).toStrictEqual({
      average: 3,
    });
  });

  it('should abort the request when the custom timeout is hit', async () => {
    // Server responds after 100ms, but the caller only waits 20ms, so the
    // AbortController inside fetchWithCache should cancel the request.
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .delay(100)
      .reply(200, '{"average": 4}');

    await expect(() =>
      fetchWithCache(
        'https://fetchwithcache.metamask.io/price',
        {},
        { timeout: 20 },
      ),
    ).rejects.toThrow({
      name: 'AbortError',
      message: 'The user aborted a request.',
    });
  });

  it('throws when the response is unsuccessful', async () => {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(500, '{"average": 6}');

    // Any non-2xx status is treated as an error; the exact message is not
    // asserted here, only that the promise rejects.
    await expect(() =>
      fetchWithCache('https://fetchwithcache.metamask.io/price'),
    ).rejects.toThrow('');
  });

  it('throws when a POST request is attempted', async () => {
    nock('https://fetchwithcache.metamask.io')
      .post('/price')
      .reply(200, '{"average": 7}');

    // Only GET requests are cacheable; other methods are rejected up front.
    await expect(() =>
      fetchWithCache('https://fetchwithcache.metamask.io/price', {
        method: 'POST',
      }),
    ).rejects.toThrow('');
  });

  it('throws when the request has a truthy body', async () => {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 8}');

    // A request body implies a non-idempotent call, which must not be cached.
    await expect(() =>
      fetchWithCache('https://fetchwithcache.metamask.io/price', { body: 1 }),
    ).rejects.toThrow('');
  });

  it('throws when the request has an invalid Content-Type header', async () => {
    nock('https://fetchwithcache.metamask.io')
      .get('/price')
      .reply(200, '{"average": 9}');

    // fetchWithCache only supports JSON; a text/plain Content-Type is refused.
    await expect(() =>
      fetchWithCache('https://fetchwithcache.metamask.io/price', {
        headers: { 'Content-Type': 'text/plain' },
      }),
    ).rejects.toThrow({
      message: 'fetchWithCache only supports JSON responses',
    });
  });

  // Regression test for the data race fixed in #10079: two in-flight requests
  // used to share one cache object, so the slower response could clobber the
  // faster one's freshly written entry. Caching each URL under its own
  // storage key ('cachedFetch:<url>') makes concurrent writes independent.
  it('should correctly cache responses from interwoven requests', async () => {
    nock('https://fetchwithcache.metamask.io')
      .get('/foo')
      .reply(200, '{"average": 9}');
    nock('https://fetchwithcache.metamask.io')
      .get('/bar')
      .reply(200, '{"average": 9}');

    // Back the mocked storage helpers with a real in-memory object so both
    // writes land in the same place and we can inspect the final state.
    const testCache = {};
    getStorageItem.mockImplementation((key) => testCache[key]);
    setStorageItem.mockImplementation((key, value) => {
      testCache[key] = value;
    });

    await Promise.all([
      fetchWithCache(
        'https://fetchwithcache.metamask.io/foo',
        {},
        { cacheRefreshTime: 123 },
      ),
      fetchWithCache(
        'https://fetchwithcache.metamask.io/bar',
        {},
        { cacheRefreshTime: 123 },
      ),
    ]);

    // Both responses must survive: neither concurrent write may have
    // overwritten the other's cache entry.
    expect(
      testCache['cachedFetch:https://fetchwithcache.metamask.io/foo']
        .cachedResponse,
    ).toStrictEqual({ average: 9 });
    expect(
      testCache['cachedFetch:https://fetchwithcache.metamask.io/bar']
        .cachedResponse,
    ).toStrictEqual({ average: 9 });
  });
});
|