
/projects/chrome-dino/sw.js

https://gitlab.com/ratpoopmachine/3kh0-github-io
  1. "use strict";
  2. const OFFLINE_DATA_FILE = "offline.js";
  3. const CACHE_NAME_PREFIX = "chrome-dino";
  4. const BROADCASTCHANNEL_NAME = "offline";
  5. const CONSOLE_PREFIX = "[SW] ";
  6. const LAZYLOAD_KEYNAME = "";
  7. // Create a BroadcastChannel if supported.
  8. const broadcastChannel = (typeof BroadcastChannel === "undefined" ? null : new BroadcastChannel(BROADCASTCHANNEL_NAME));
  9. //////////////////////////////////////
  10. // Utility methods
  11. function PostBroadcastMessage(o)
  12. {
  13. if (!broadcastChannel)
  14. return; // not supported
  15. // Impose artificial (and arbitrary!) delay of 3 seconds to make sure client is listening by the time the message is sent.
  16. // Note we could remove the delay on some messages, but then we create a race condition where sometimes messages can arrive
  17. // in the wrong order (e.g. "update ready" arrives before "started downloading update"). So to keep the consistent ordering,
  18. // delay all messages by the same amount.
  19. setTimeout(() => broadcastChannel.postMessage(o), 3000);
  20. };
  21. function Broadcast(type)
  22. {
  23. PostBroadcastMessage({
  24. "type": type
  25. });
  26. };
  27. function BroadcastDownloadingUpdate(version)
  28. {
  29. PostBroadcastMessage({
  30. "type": "downloading-update",
  31. "version": version
  32. });
  33. }
  34. function BroadcastUpdateReady(version)
  35. {
  36. PostBroadcastMessage({
  37. "type": "update-ready",
  38. "version": version
  39. });
  40. }
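
// Message types posted on the "offline" BroadcastChannel by this worker (all sent via PostBroadcastMessage above):
//   "downloading"        - first-time caching has started
//   "downloading-update" - an updated version is being downloaded (includes a "version" field)
//   "update-ready"       - an updated version finished caching (includes a "version" field)
//   "update-pending"     - a newer cache already exists and will be used on a later navigation
//   "up-to-date"         - the current cache matches the latest offline.js version
//   "offline-ready"      - first-time caching finished; the page now works offline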

function IsUrlInLazyLoadList(url, lazyLoadList)
{
	if (!lazyLoadList)
		return false;		// presumably lazy load list failed to load

	try {
		for (const lazyLoadRegex of lazyLoadList)
		{
			if (new RegExp(lazyLoadRegex).test(url))
				return true;
		}
	}
	catch (err)
	{
		console.error(CONSOLE_PREFIX + "Error matching in lazy-load list: ", err);
	}

	return false;
};

function WriteLazyLoadListToStorage(lazyLoadList)
{
	if (typeof localforage === "undefined")
		return Promise.resolve();		// bypass if localforage not imported
	else
		return localforage.setItem(LAZYLOAD_KEYNAME, lazyLoadList);
};

function ReadLazyLoadListFromStorage()
{
	if (typeof localforage === "undefined")
		return Promise.resolve([]);		// bypass if localforage not imported
	else
		return localforage.getItem(LAZYLOAD_KEYNAME);
};

function GetCacheBaseName()
{
	// Include the scope to avoid name collisions with any other SWs on the same origin.
	// e.g. "chrome-dino-https://example.com/foo/" (won't collide with anything under bar/)
	return CACHE_NAME_PREFIX + "-" + self.registration.scope;
};

function GetCacheVersionName(version)
{
	// Append the version number to the cache name.
	// e.g. "chrome-dino-https://example.com/foo/-v2"
	return GetCacheBaseName() + "-v" + version;
};

// Return caches.keys() filtered down to just caches we're interested in (with the right base name).
// This filters out caches from unrelated scopes.
async function GetAvailableCacheNames()
{
	const cacheNames = await caches.keys();
	const cacheBaseName = GetCacheBaseName();
	return cacheNames.filter(n => n.startsWith(cacheBaseName));
};

// Identify if an update is pending, which is the case when we have 2 or more available caches.
// One must be an update that is waiting, since the next navigate that does an upgrade will
// delete all the old caches leaving just one currently-in-use cache.
async function IsUpdatePending()
{
	const availableCacheNames = await GetAvailableCacheNames();
	return (availableCacheNames.length >= 2);
};

// Automatically deduce the main page URL (e.g. index.html or main.aspx) from the available browser windows.
// This prevents having to hard-code an index page in the file list, implicitly caching it like AppCache did.
async function GetMainPageUrl()
{
	const allClients = await clients.matchAll({
		includeUncontrolled: true,
		type: "window"
	});

	for (const c of allClients)
	{
		// Parse off the scope from the full client URL, e.g. https://example.com/index.html -> index.html
		let url = c.url;
		if (url.startsWith(self.registration.scope))
			url = url.substring(self.registration.scope.length);

		if (url && url !== "/")		// ./ is also implicitly cached so don't bother returning that
		{
			// If the URL is solely a search string, prefix it with / to ensure it caches correctly.
			// e.g. https://example.com/?foo=bar needs to cache as /?foo=bar, not just ?foo=bar.
			if (url.startsWith("?"))
				url = "/" + url;

			return url;
		}
	}

	return "";		// no main page URL could be identified
};

// Fetch optionally bypassing HTTP cache using fetch cache options
function fetchWithBypass(request, bypassCache)
{
	if (typeof request === "string")
		request = new Request(request);

	if (bypassCache)
	{
		return fetch(request.url, {
			headers: request.headers,
			mode: request.mode,
			credentials: request.credentials,
			redirect: request.redirect,
			cache: "no-store"
		});
	}
	else
	{
		// bypass disabled: perform normal fetch which is allowed to return from HTTP cache
		return fetch(request);
	}
};

// Effectively a cache.addAll() that only creates the cache on all requests being successful (as a weak attempt at making it atomic)
// and can optionally cache-bypass with fetchWithBypass in every request
async function CreateCacheFromFileList(cacheName, fileList, bypassCache)
{
	// Kick off all requests and wait for them all to complete
	const responses = await Promise.all(fileList.map(url => fetchWithBypass(url, bypassCache)));

	// Check if any request failed. If so don't move on to opening the cache.
	// This makes sure we only open a cache if all requests succeeded.
	let allOk = true;

	for (const response of responses)
	{
		if (!response.ok)
		{
			allOk = false;
			console.error(CONSOLE_PREFIX + "Error fetching '" + response.url + "' (" + response.status + " " + response.statusText + ")");
		}
	}

	if (!allOk)
		throw new Error("not all resources were fetched successfully");

	// Can now assume all responses are OK. Open a cache and write all responses there.
	// TODO: ideally we can do this transactionally to ensure a complete cache is written as one atomic operation.
	// This needs either new transactional features in the spec, or at the very least a way to rename a cache
	// (so we can write to a temporary name that won't be returned by GetAvailableCacheNames() and then rename it when ready).
	const cache = await caches.open(cacheName);

	try {
		return await Promise.all(responses.map(
			(response, i) => cache.put(fileList[i], response)
		));
	}
	catch (err)
	{
		// Not sure why cache.put() would fail (maybe if storage quota exceeded?) but in case it does,
		// clean up the cache to try to avoid leaving behind an incomplete cache.
		console.error(CONSOLE_PREFIX + "Error writing cache entries: ", err);
		caches.delete(cacheName);
		throw err;
	}
};
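
// For reference, UpdateCheck() below expects OFFLINE_DATA_FILE ("offline.js") to parse as JSON with a
// "version" (number), a "fileList" (array of URLs to pre-cache) and an optional "lazyLoad" (array of
// regex strings matched against request URLs). The values here are purely illustrative, not from this project:
// { "version": 2, "fileList": ["style.css", "game.js"], "lazyLoad": ["\\.mp3$"] }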

async function UpdateCheck(isFirst)
{
	try {
		// Always bypass cache when requesting offline.js to make sure we find out about new versions.
		const response = await fetchWithBypass(OFFLINE_DATA_FILE, true);

		if (!response.ok)
			throw new Error(OFFLINE_DATA_FILE + " responded with " + response.status + " " + response.statusText);

		const data = await response.json();
		const version = data.version;
		const fileList = data.fileList;
		const lazyLoadList = data.lazyLoad;
		const currentCacheName = GetCacheVersionName(version);
		const cacheExists = await caches.has(currentCacheName);

		// Don't recache if there is already a cache that exists for this version. Assume it is complete.
		if (cacheExists)
		{
			// Log whether we are up-to-date or pending an update.
			const isUpdatePending = await IsUpdatePending();

			if (isUpdatePending)
			{
				console.log(CONSOLE_PREFIX + "Update pending");
				Broadcast("update-pending");
			}
			else
			{
				console.log(CONSOLE_PREFIX + "Up to date");
				Broadcast("up-to-date");
			}

			return;
		}

		// Implicitly add the main page URL to the file list, e.g. "index.html", so we don't have to assume a specific name.
		const mainPageUrl = await GetMainPageUrl();

		// Prepend the main page URL to the file list if we found one and it is not already in the list.
		// Also make sure we request the base / which should serve the main page.
		fileList.unshift("./");

		if (mainPageUrl && fileList.indexOf(mainPageUrl) === -1)
			fileList.unshift(mainPageUrl);

		console.log(CONSOLE_PREFIX + "Caching " + fileList.length + " files for offline use");

		if (isFirst)
			Broadcast("downloading");
		else
			BroadcastDownloadingUpdate(version);

		// Note we don't bypass the cache on the first update check. This is because SW installation and the following
		// update check caching will race with the normal page load requests. For any normal loading fetches that have already
		// completed or are in-flight, it is pointless and wasteful to cache-bust the request for offline caching, since that
		// forces a second network request to be issued when a response from the browser HTTP cache would be fine.
		if (lazyLoadList)
			await WriteLazyLoadListToStorage(lazyLoadList);		// dump lazy load list to local storage

		await CreateCacheFromFileList(currentCacheName, fileList, !isFirst);
		const isUpdatePending = await IsUpdatePending();

		if (isUpdatePending)
		{
			console.log(CONSOLE_PREFIX + "All resources saved, update ready");
			BroadcastUpdateReady(version);
		}
		else
		{
			console.log(CONSOLE_PREFIX + "All resources saved, offline support ready");
			Broadcast("offline-ready");
		}
	}
	catch (err)
	{
		// Update check fetches fail when we're offline, but in case there's any other kind of problem with it, log a warning.
		console.warn(CONSOLE_PREFIX + "Update check failed: ", err);
	}
};

self.addEventListener("install", event =>
{
	// On install kick off an update check to cache files on first use.
	// If it fails we can still complete the install event and leave the SW running, we'll just
	// retry on the next navigate.
	event.waitUntil(
		UpdateCheck(true)		// first update
		.catch(() => null)
	);
});

async function GetCacheNameToUse(availableCacheNames, doUpdateCheck)
{
	// Prefer the oldest cache available. This avoids mixed-version responses by ensuring that if a new cache
	// is created and filled due to an update check while the page is running, we keep returning resources
	// from the original (oldest) cache only.
	if (availableCacheNames.length === 1 || !doUpdateCheck)
		return availableCacheNames[0];

	// We are making a navigate request with more than one cache available. Check if we can expire any old ones.
	const allClients = await clients.matchAll();

	// If there are other clients open, don't expire anything yet. We don't want to delete any caches they
	// might be using, which could cause mixed-version responses.
	if (allClients.length > 1)
		return availableCacheNames[0];

	// Identify newest cache to use. Delete all the others.
	const latestCacheName = availableCacheNames[availableCacheNames.length - 1];
	console.log(CONSOLE_PREFIX + "Updating to new version");

	await Promise.all(
		availableCacheNames.slice(0, -1)
		.map(c => caches.delete(c))
	);

	return latestCacheName;
};

async function HandleFetch(event, doUpdateCheck)
{
	const availableCacheNames = await GetAvailableCacheNames();

	// No caches available: go to network
	if (!availableCacheNames.length)
		return fetch(event.request);

	const useCacheName = await GetCacheNameToUse(availableCacheNames, doUpdateCheck);
	const cache = await caches.open(useCacheName);
	const cachedResponse = await cache.match(event.request);

	if (cachedResponse)
		return cachedResponse;		// use cached response

	// We need to check if this request is to be lazy-cached. Send the request and load the lazy-load list
	// from storage simultaneously.
	const result = await Promise.all([fetch(event.request), ReadLazyLoadListFromStorage()]);
	const fetchResponse = result[0];
	const lazyLoadList = result[1];

	if (IsUrlInLazyLoadList(event.request.url, lazyLoadList))
	{
		// Handle failure writing to the cache. This can happen if the storage quota is exceeded, which is particularly
		// likely in Safari 11.1, which appears to have very tight storage limits. Make sure even in the event of an error
		// we continue to return the response from the fetch.
		try {
			// Note clone response since we also respond with it
			await cache.put(event.request, fetchResponse.clone());
		}
		catch (err)
		{
			console.warn(CONSOLE_PREFIX + "Error caching '" + event.request.url + "': ", err);
		}
	}

	return fetchResponse;
};

self.addEventListener("fetch", event =>
{
	/** NOTE (iain)
	 * This check is to prevent a bug with XMLHttpRequest where if it's
	 * proxied with "FetchEvent.prototype.respondWith" no upload progress
	 * events are triggered. By returning we allow the default action to
	 * occur instead. Currently all cross-origin requests fall back to default.
	 */
	if (new URL(event.request.url).origin !== location.origin)
		return;

	// Check for an update on navigate requests
	const doUpdateCheck = (event.request.mode === "navigate");
	const responsePromise = HandleFetch(event, doUpdateCheck);

	if (doUpdateCheck)
	{
		// allow the main request to complete, then check for updates
		event.waitUntil(
			responsePromise
			.then(() => UpdateCheck(false))		// not first check
		);
	}

	event.respondWith(responsePromise);
});
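
For context, a page served under this worker's scope would typically register the worker and listen on the same "offline" BroadcastChannel for the status messages listed near the top of the file. The sketch below is illustrative only: the registration path "sw.js" and the handler bodies are assumptions, not part of this repository.

// Hypothetical page-side companion script (not part of sw.js); assumes the worker is served as "sw.js" at the scope root.
if ("serviceWorker" in navigator)
	navigator.serviceWorker.register("sw.js");

if (typeof BroadcastChannel !== "undefined")
{
	const channel = new BroadcastChannel("offline");		// must match BROADCASTCHANNEL_NAME in sw.js
	channel.onmessage = e =>
	{
		switch (e.data["type"])
		{
		case "downloading-update":
			console.log("Downloading update to v" + e.data["version"]);
			break;
		case "update-ready":
			console.log("Update v" + e.data["version"] + " ready; reload to use it");
			break;
		case "offline-ready":
			console.log("Offline support ready");
			break;
		// other types sent by the worker: "downloading", "update-pending", "up-to-date"
		}
	};
}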