fixed handling of errors thrown when executing fetch()-ed config.json (fixes #81)

merge-requests/23/merge
Michał 'rysiek' Woźniak 2024-02-18 00:43:42 +00:00
rodzic 4ec2b9f911
commit dd1ef475e8
2 zmienionych plików z 88 dodań i 54 usunięć

Wyświetl plik

@ -504,6 +504,26 @@ describe('service-worker', async () => {
assertSpyCalls(self.fetch, 1)
})
it("should use default LibResilientConfig values when fetching config.json succeeds, but executing it throws an error", async () => {
let mock_response_data = {
data: JSON.stringify({loggedComponents: ['service-worker', 'no-such-plugin'], plugins: [{name: "no-such-plugin"}]})
}
window.fetch = spy(window.getMockedFetch(mock_response_data))
await import("../../service-worker.js?" + window.test_id);
await self.dispatchEvent(new Event('install'))
await self.waitForSWInstall()
assertEquals(typeof self.LibResilientConfig, "object")
assertEquals(self.LibResilientConfig.defaultPluginTimeout, 10000)
assertEquals(self.LibResilientConfig.plugins, [{name: "fetch"},{name: "cache"}])
assertEquals(self.LibResilientConfig.loggedComponents, ['service-worker', 'fetch', 'cache'])
assertEquals(self.LibResilientConfig.normalizeQueryParams, true)
assertEquals(self.LibResilientConfig.useMimeSniffingLibrary, false)
assertSpyCalls(self.fetch, 1)
})
it("should fail if default LibResilientConfig values are invalid and fetched config.json is not valid JSON", async () => {
let mock_response_data = {

Wyświetl plik

@ -242,13 +242,13 @@ let verifyConfigData = (cdata) => {
* configURL - url of the config file
* cresponse - response we're caching
*/
let cacheConfigJSON = async (configURL, cresponse, use_cache) => {
let cacheConfigJSON = async (configURL, cresponse, use_source) => {
try {
var cache = await caches.open(use_cache)
var cache = await caches.open(use_source)
await cache.put(configURL, cresponse)
self.log('service-worker', `config cached in cache: ${use_cache}.`)
self.log('service-worker', `config cached in cache: ${use_source}.`)
} catch(e) {
self.log('service-worker', `failed to cache config in cache ${use_cache}: ${e}`)
self.log('service-worker', `failed to cache config in cache ${use_source}: ${e}`)
}
}
@ -449,12 +449,12 @@ let initServiceWorker = async () => {
// TODO: handle in a more elegant way
let lrpcBackup = new Map(self.LibResilientPluginConstructors)
// caches to try: temp cache, main cache
let available_caches = ['v1', 'v1:verified']
// config sources to try
let config_sources = ['v1', 'v1:verified', 'fetch']
// keep track
let config_executed = false
let use_cache = false
let use_source = false
do {
@ -462,62 +462,76 @@ let initServiceWorker = async () => {
let cdata = false
// where are we getting the config.json from this time?
// we either get a string (name of a cache), or undefined (signifying need for fetch())
use_cache = available_caches.shift()
// we either get a string (name of a cache, or "fetch" for simple fetch()),
// or undefined (signifying need to use the defaults)
use_source = config_sources.shift()
try {
// cache?
if ( typeof use_cache === 'string' ) {
self.log('service-worker', `retrieving config.json from cache: ${use_cache}.`)
cresponse = await caches.match(configURL, {cacheName: use_cache})
// are we using any kind of source, or fall back to defaults?
if ( typeof use_source === 'string' ) {
// bail early if we got nothing
if (cresponse === undefined) {
self.log('service-worker', `config.json not found in cache: ${use_cache}.`)
continue
// some kind of source! cache?
if ( ( use_source === 'v1' ) || ( use_source === 'v1:verified' ) ) {
self.log('service-worker', `retrieving config.json from cache: ${use_source}.`)
cresponse = await caches.match(configURL, {cacheName: use_source})
// bail early if we got nothing
if (cresponse === undefined) {
self.log('service-worker', `config.json not found in cache: ${use_source}.`)
continue
}
// regular fetch?
// (we don't have any plugin transports at this point, obviously...)
} else if ( use_source === "fetch" ) {
self.log('service-worker', `retrieving config.json using fetch().`)
cresponse = await fetch(configURL)
// that should not happen!
} else {
throw new Error(`unknown config.json source: ${use_source}; this should never happen!`)
}
// regular fetch
// (we don't have any plugin transports at this point, obviously...)
// extract the retrieved JSON and verify it
cdata = await getConfigJSON(cresponse)
// do we have anything to work with?
if (cdata === false) {
// cached config.json was invalid; no biggie, try another cache, or fetch()
if ( ( use_source === 'v1' ) || ( use_source === 'v1:verified' ) ) {
self.log('service-worker', `cached config.json is not valid; cache: ${use_source}`)
// if that was a fetch() config, we need to fall-back to defaults!
} else {
self.log('service-worker', `fetched config.json is not valid; using defaults`)
}
// no valid config means we need to go around again
continue
// we good!
} else {
self.log('service-worker', `valid-looking config.json retrieved.`)
}
// anything else just means "use defaults"
} else {
self.log('service-worker', `retrieving config.json using fetch().`)
cresponse = await fetch(configURL)
self.log('service-worker', `retrieving config.json failed completely, using built-in defaults.`)
// defaults means an empty object here,
// we're merging with actual defaults later on
cdata = {}
}
// extract the JSON and verify it
cdata = await getConfigJSON(cresponse)
// exception? no bueno!
} catch(e) {
cdata = false
self.log('service-worker', `exception when trying to retrieve config.json: ${e.message}`)
}
// do we have anything to work with?
if (cdata === false) {
// cached config.json was invalid; no biggie, try another cache, or fetch()
if (typeof use_cache === "string") {
self.log('service-worker', `cached config.json is not valid; cache: ${use_cache}`)
continue
// if that was a fetch() config, we need to run to defaults!
} else {
self.log('service-worker', `fetched config.json is not valid; using defaults`)
// set an empty object, this will in effect deploy pure defaults
cdata = {}
// clear cresponse which will indicate later on
// that we did not use data from any response, cache nor fetch
cresponse = false
}
// we good!
} else {
self.log('service-worker', `valid-looking config.json retrieved.`)
continue
}
// merge configs
// merge configs — either with the retrieved JSON,
// or with an empty object if using defaults
config = {...self.LibResilientConfig, ...cdata}
// try executing the config
@ -538,7 +552,7 @@ let initServiceWorker = async () => {
// if we're using the defaults, and yet loading of the config failed
// something is massively wrong
if ( ( cresponse === false ) && ( config_executed === false ) ) {
if ( ( use_source === undefined ) && ( config_executed === false ) ) {
// this really should never happen
throw new Error('Failed to load the default config; this should never happen!')
}
@ -554,19 +568,19 @@ let initServiceWorker = async () => {
// we're good, let's cache the config as verified if we need to
// that is, if it comes from the "v1" cache...
if (use_cache === "v1") {
if (use_source === "v1") {
self.log('service-worker', `successfully loaded config.json; caching in cache: v1:verified`)
await cacheConfigJSON(configURL, cresponse, 'v1:verified')
// we used the v1:verified cache; we should cache config.json into the v1 cache
// as that will speed things up a bit next time we need to load the service worker
} else if (use_cache === "v1:verified") {
} else if (use_source === "v1:verified") {
self.log('service-worker', `successfully loaded config.json; caching in cache: v1`)
await cacheConfigJSON(configURL, cresponse, 'v1')
// or, was fetch()-ed and valid (no caching if we're going with defaults, obviously)
} else if ( (use_cache === undefined) && (cresponse !== false) ) {
self.log('service-worker', `successfully loaded config.json; caching in cache: v1, v1:verified`)
} else if (use_source === "fetch") {
self.log('service-worker', `successfully loaded config.json; caching in caches: v1, v1:verified`)
// we want to cache to both, so that:
// 1. we get the extra bit of performance from using the v1 cache that is checked first
// 2. but we get the verified config already in the v1:verified cache for later