Cache media (#6902)
parent 5eea3f9877
commit e829c21cc6
10 changed files with 189 additions and 103 deletions
@@ -1,5 +1,5 @@
 import api, { getLinks } from '../api';
-import asyncDB from '../db/async';
+import asyncDB from '../storage/db';
 import { importAccount, importFetchedAccount, importFetchedAccounts } from './importer';
 
 export const ACCOUNT_FETCH_REQUEST = 'ACCOUNT_FETCH_REQUEST';
@@ -1,4 +1,4 @@
-import { putAccounts, putStatuses } from '../../db/modifier';
+import { putAccounts, putStatuses } from '../../storage/modifier';
 import { normalizeAccount, normalizeStatus } from './normalizer';
 
 export const ACCOUNT_IMPORT = 'ACCOUNT_IMPORT';
@@ -1,6 +1,6 @@
 import api from '../api';
-import asyncDB from '../db/async';
-import { evictStatus } from '../db/modifier';
+import asyncDB from '../storage/db';
+import { evictStatus } from '../storage/modifier';
 
 import { deleteFromTimelines } from './timelines';
 import { fetchStatusCard } from './cards';
@@ -1,93 +0,0 @@
-import asyncDB from './async';
-
-const limit = 1024;
-
-function put(name, objects, callback) {
-  asyncDB.then(db => {
-    const putTransaction = db.transaction(name, 'readwrite');
-    const putStore = putTransaction.objectStore(name);
-    const putIndex = putStore.index('id');
-
-    objects.forEach(object => {
-      function add() {
-        putStore.add(object);
-      }
-
-      putIndex.getKey(object.id).onsuccess = retrieval => {
-        if (retrieval.target.result) {
-          putStore.delete(retrieval.target.result).onsuccess = add;
-        } else {
-          add();
-        }
-      };
-    });
-
-    putTransaction.oncomplete = () => {
-      const readTransaction = db.transaction(name, 'readonly');
-      const readStore = readTransaction.objectStore(name);
-
-      readStore.count().onsuccess = count => {
-        const excess = count.target.result - limit;
-
-        if (excess > 0) {
-          readStore.getAll(null, excess).onsuccess =
-            retrieval => callback(retrieval.target.result.map(({ id }) => id));
-        }
-      };
-    };
-  });
-}
-
-export function evictAccounts(ids) {
-  asyncDB.then(db => {
-    const transaction = db.transaction(['accounts', 'statuses'], 'readwrite');
-    const accounts = transaction.objectStore('accounts');
-    const accountsIdIndex = accounts.index('id');
-    const accountsMovedIndex = accounts.index('moved');
-    const statuses = transaction.objectStore('statuses');
-    const statusesIndex = statuses.index('account');
-
-    function evict(toEvict) {
-      toEvict.forEach(id => {
-        accountsMovedIndex.getAllKeys(id).onsuccess =
-          ({ target }) => evict(target.result);
-
-        statusesIndex.getAll(id).onsuccess =
-          ({ target }) => evictStatuses(target.result.map(({ id }) => id));
-
-        accountsIdIndex.getKey(id).onsuccess =
-          ({ target }) => target.result && accounts.delete(target.result);
-      });
-    }
-
-    evict(ids);
-  });
-}
-
-export function evictStatus(id) {
-  return evictStatuses([id]);
-}
-
-export function evictStatuses(ids) {
-  asyncDB.then(db => {
-    const store = db.transaction('statuses', 'readwrite').objectStore('statuses');
-    const idIndex = store.index('id');
-    const reblogIndex = store.index('reblog');
-
-    ids.forEach(id => {
-      reblogIndex.getAllKeys(id).onsuccess =
-        ({ target }) => target.result.forEach(reblogKey => store.delete(reblogKey));
-
-      idIndex.getKey(id).onsuccess =
-        ({ target }) => target.result && store.delete(target.result);
-    });
-  });
-}
-
-export function putAccounts(records) {
-  put('accounts', records, evictAccounts);
-}
-
-export function putStatuses(records) {
-  put('statuses', records, evictStatuses);
-}
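The deleted module is the callback-based predecessor of the new storage/modifier.js later in the diff: put() upserts each record through the 'id' index and, once the write transaction completes, counts the store and hands the ids of anything beyond the 1,024-record cap to an eviction callback. A minimal standalone sketch of that capping step follows; the function and parameter names are placeholders, and it assumes an auto-incrementing primary key so the lowest keys are the oldest rows.

```js
// Sketch only: trim an IndexedDB object store down to `limit` records and
// report the evicted ids, as the removed put() helper did after each write.
// trimStore and onEvict are illustrative names, not code from the commit.
function trimStore(db, storeName, limit, onEvict) {
  const store = db.transaction(storeName, 'readonly').objectStore(storeName);
  const count = store.count();

  count.onsuccess = () => {
    const excess = count.result - limit;

    if (excess > 0) {
      // getAll(null, excess) returns the `excess` records with the lowest
      // primary keys, i.e. the oldest rows under an auto-incrementing key.
      store.getAll(null, excess).onsuccess =
        ({ target }) => onEvict(target.result.map(({ id }) => id));
    }
  };
}
```

evictAccounts and evictStatuses then cascade through the 'moved', 'account', and 'reblog' indexes, so rows that depend on an evicted record disappear along with it.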
@@ -1,6 +1,10 @@
 import './web_push_notifications';
 
-function openCache() {
+function openSystemCache() {
+  return caches.open('mastodon-system');
+}
+
+function openWebCache() {
   return caches.open('mastodon-web');
 }
 
@@ -11,7 +15,7 @@ function fetchRoot() {
 // Cause a new version of a registered Service Worker to replace an existing one
 // that is already installed, and replace the currently active worker on open pages.
 self.addEventListener('install', function(event) {
-  event.waitUntil(Promise.all([openCache(), fetchRoot()]).then(([cache, root]) => cache.put('/', root)));
+  event.waitUntil(Promise.all([openWebCache(), fetchRoot()]).then(([cache, root]) => cache.put('/', root)));
 });
 self.addEventListener('activate', function(event) {
   event.waitUntil(self.clients.claim());
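These first two hunks only split the old openCache() into two named caches and keep the existing behaviour: on install the worker stores a fresh copy of '/' in 'mastodon-web', which the /web/ navigation branch below falls back to when the network fails. That underlying pattern, reduced to a self-contained sketch with illustrative cache and handler names rather than the commit's:

```js
// Generic "pre-cache the app shell, serve it on network failure" sketch.
self.addEventListener('install', event => {
  event.waitUntil(caches.open('app-shell').then(cache => cache.add('/')));
});

self.addEventListener('fetch', event => {
  if (event.request.mode === 'navigate') {
    event.respondWith(
      // Try the network first; if it is unreachable, answer with the cached shell.
      fetch(event.request)
        .catch(() => caches.open('app-shell').then(cache => cache.match('/')))
    );
  }
});
```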
@@ -21,7 +25,7 @@ self.addEventListener('fetch', function(event) {
 
   if (url.pathname.startsWith('/web/')) {
     const asyncResponse = fetchRoot();
-    const asyncCache = openCache();
+    const asyncCache = openWebCache();
 
     event.respondWith(asyncResponse.then(async response => {
       if (response.ok) {
@@ -31,10 +35,10 @@ self.addEventListener('fetch', function(event) {
       }
 
       throw null;
-    }).catch(() => caches.match('/')));
+    }).catch(() => asyncCache.then(cache => cache.match('/'))));
   } else if (url.pathname === '/auth/sign_out') {
     const asyncResponse = fetch(event.request);
-    const asyncCache = openCache();
+    const asyncCache = openWebCache();
 
     event.respondWith(asyncResponse.then(async response => {
       if (response.ok || response.type === 'opaqueredirect') {
@@ -44,5 +48,21 @@ self.addEventListener('fetch', function(event) {
 
       return response;
     }));
+  } else if (process.env.CDN_HOST ? url.host === process.env.CDN_HOST : url.pathname.startsWith('/system/')) {
+    event.respondWith(openSystemCache().then(async cache => {
+      const cached = await cache.match(event.request.url);
+
+      if (cached === undefined) {
+        const fetched = await fetch(event.request);
+
+        if (fetched.ok) {
+          await cache.put(event.request.url, fetched);
+        }
+
+        return fetched.clone();
+      }
+
+      return cached;
+    }));
   }
 });
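The new branch is a cache-first strategy for media: attachment requests (matched by CDN host when one is configured, otherwise by the local /system/ prefix) are answered from the 'mastodon-system' cache, and misses are fetched once, stored, and then served. Because a Response body can only be read once, one copy has to go to the cache and another to the page. Roughly the same logic as a reusable helper is sketched below; the name cacheFirst is made up here, not part of the commit.

```js
// Sketch of the cache-first lookup used for media above.
async function cacheFirst(cacheName, request) {
  const cache = await caches.open(cacheName);
  const cached = await cache.match(request.url);

  if (cached !== undefined) {
    return cached;
  }

  const fetched = await fetch(request);

  if (fetched.ok) {
    // Store a clone so the original body stays readable for the caller.
    await cache.put(request.url, fetched.clone());
  }

  return fetched;
}
```

Keeping media in its own named cache also means it can be dropped wholesale with caches.delete('mastodon-system') without touching the cached application shell in 'mastodon-web'.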
app/javascript/mastodon/storage/modifier.js (new file, 151 lines)
@@ -0,0 +1,151 @@
+import asyncDB from './db';
+import { autoPlayGif } from '../initial_state';
+
+const accountAssetKeys = ['avatar', 'avatar_static', 'header', 'header_static'];
+const avatarKey = autoPlayGif ? 'avatar' : 'avatar_static';
+const limit = 1024;
+const asyncCache = caches.open('mastodon-system');
+
+function put(name, objects, onupdate, oncreate) {
+  return asyncDB.then(db => new Promise((resolve, reject) => {
+    const putTransaction = db.transaction(name, 'readwrite');
+    const putStore = putTransaction.objectStore(name);
+    const putIndex = putStore.index('id');
+
+    objects.forEach(object => {
+      putIndex.getKey(object.id).onsuccess = retrieval => {
+        function addObject() {
+          putStore.add(object);
+        }
+
+        function deleteObject() {
+          putStore.delete(retrieval.target.result).onsuccess = addObject;
+        }
+
+        if (retrieval.target.result) {
+          if (onupdate) {
+            onupdate(object, retrieval.target.result, putStore, deleteObject);
+          } else {
+            deleteObject();
+          }
+        } else {
+          if (oncreate) {
+            oncreate(object, addObject);
+          } else {
+            addObject();
+          }
+        }
+      };
+    });
+
+    putTransaction.oncomplete = () => {
+      const readTransaction = db.transaction(name, 'readonly');
+      const readStore = readTransaction.objectStore(name);
+      const count = readStore.count();
+
+      count.onsuccess = () => {
+        const excess = count.result - limit;
+
+        if (excess > 0) {
+          const retrieval = readStore.getAll(null, excess);
+
+          retrieval.onsuccess = () => resolve(retrieval.result);
+          retrieval.onerror = reject;
+        } else {
+          resolve([]);
+        }
+      };
+
+      count.onerror = reject;
+    };
+
+    putTransaction.onerror = reject;
+  }));
+}
+
+function evictAccountsByRecords(records) {
+  asyncDB.then(db => {
+    const transaction = db.transaction(['accounts', 'statuses'], 'readwrite');
+    const accounts = transaction.objectStore('accounts');
+    const accountsIdIndex = accounts.index('id');
+    const accountsMovedIndex = accounts.index('moved');
+    const statuses = transaction.objectStore('statuses');
+    const statusesIndex = statuses.index('account');
+
+    function evict(toEvict) {
+      toEvict.forEach(record => {
+        asyncCache.then(cache => accountAssetKeys.forEach(key => cache.delete(records[key])));
+
+        accountsMovedIndex.getAll(record.id).onsuccess = ({ target }) => evict(target.result);
+
+        statusesIndex.getAll(record.id).onsuccess =
+          ({ target }) => evictStatusesByRecords(target.result);
+
+        accountsIdIndex.getKey(record.id).onsuccess =
+          ({ target }) => target.result && accounts.delete(target.result);
+      });
+    }
+
+    evict(records);
+  });
+}
+
+export function evictStatus(id) {
+  return evictStatuses([id]);
+}
+
+export function evictStatuses(ids) {
+  asyncDB.then(db => {
+    const store = db.transaction('statuses', 'readwrite').objectStore('statuses');
+    const idIndex = store.index('id');
+    const reblogIndex = store.index('reblog');
+
+    ids.forEach(id => {
+      reblogIndex.getAllKeys(id).onsuccess =
+        ({ target }) => target.result.forEach(reblogKey => store.delete(reblogKey));
+
+      idIndex.getKey(id).onsuccess =
+        ({ target }) => target.result && store.delete(target.result);
+    });
+  });
+}
+
+function evictStatusesByRecords(records) {
+  evictStatuses(records.map(({ id }) => id));
+}
+
+export function putAccounts(records) {
+  const newURLs = [];
+
+  put('accounts', records, (newRecord, oldKey, store, oncomplete) => {
+    store.get(oldKey).onsuccess = ({ target }) => {
+      accountAssetKeys.forEach(key => {
+        const newURL = newRecord[key];
+        const oldURL = target.result[key];
+
+        if (newURL !== oldURL) {
+          asyncCache.then(cache => cache.delete(oldURL));
+        }
+      });
+
+      const newURL = newRecord[avatarKey];
+      const oldURL = target.result[avatarKey];
+
+      if (newURL !== oldURL) {
+        newURLs.push(newURL);
+      }
+
+      oncomplete();
+    };
+  }, (newRecord, oncomplete) => {
+    newURLs.push(newRecord[avatarKey]);
+    oncomplete();
+  }).then(records => {
+    evictAccountsByRecords(records);
+    asyncCache.then(cache => cache.addAll(newURLs));
+  });
+}
+
+export function putStatuses(records) {
+  put('statuses', records).then(evictStatusesByRecords);
+}
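The action creators earlier in the diff only change their import paths, so the call sites keep the same shape: normalized account and status records are handed to putAccounts/putStatuses, and a deleted status is dropped with evictStatus. A hypothetical call site follows, just to show the intended direction of the data flow; the exported function names here are illustrative, not the commit's.

```js
// Illustrative usage of the new module, not code from the commit.
import { putAccounts, putStatuses, evictStatus } from '../storage/modifier';

export function persistFetchedAccounts(normalizedAccounts) {
  // Records are plain objects with an `id` plus avatar/header URLs; overflow
  // past 1,024 rows is evicted, and stale avatars leave the media cache too.
  putAccounts(normalizedAccounts);
}

export function persistFetchedStatuses(normalizedStatuses) {
  putStatuses(normalizedStatuses);
}

export function handleStatusDeleted(id) {
  // Removes the status row and, via the `reblog` index, any cached reblogs of it.
  evictStatus(id);
}
```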
@@ -90,7 +90,7 @@ module.exports = merge(sharedConfig, {
        '**/*.woff',
      ],
      ServiceWorker: {
-        entry: path.join(__dirname, '../../app/javascript/mastodon/service_worker/entry.js'),
+        entry: `imports-loader?process.env=>${encodeURIComponent(JSON.stringify(process.env))}!${encodeURI(path.join(__dirname, '../../app/javascript/mastodon/service_worker/entry.js'))}`,
        cacheName: 'mastodon',
        output: '../assets/sw.js',
        publicPath: '/sw.js',
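The service worker is built by OfflinePlugin as its own small bundle, so it does not automatically see the environment variables available to the main build. Piping the entry file through imports-loader with a URL-encoded copy of process.env is what lets the fetch handler above read process.env.CDN_HOST at runtime. The run-time effect is exactly the branch condition already shown in the service worker hunk; as a small sketch (the helper name is illustrative, not from the commit):

```js
// How the built sw.js can decide whether a request targets a media attachment,
// given that process.env was baked in at build time.
function isMediaRequest(url) {
  return process.env.CDN_HOST
    ? url.host === process.env.CDN_HOST      // attachments served from a configured CDN
    : url.pathname.startsWith('/system/');   // locally hosted uploads
}
```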
@@ -56,6 +56,7 @@
     "glob": "^7.1.1",
     "http-link-header": "^0.8.0",
     "immutable": "^3.8.2",
+    "imports-loader": "^0.8.0",
     "intersection-observer": "^0.5.0",
     "intl": "^1.2.5",
     "intl-messageformat": "^2.2.0",
@@ -3364,6 +3364,13 @@ import-local@^0.1.1:
     pkg-dir "^2.0.0"
     resolve-cwd "^2.0.0"
 
+imports-loader@^0.8.0:
+  version "0.8.0"
+  resolved "https://registry.yarnpkg.com/imports-loader/-/imports-loader-0.8.0.tgz#030ea51b8ca05977c40a3abfd9b4088fe0be9a69"
+  dependencies:
+    loader-utils "^1.0.2"
+    source-map "^0.6.1"
+
 imurmurhash@^0.1.4:
   version "0.1.4"
   resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"