
Fine-tune cache storage-related code

Related discussion:
https://github.com/uBlockOrigin/uBlock-discussions/discussions/876

Related commit:
086766a924
Raymond Hill 2024-02-27 15:04:05 -05:00
parent 4d88b5121c
commit 7590c0711d
4 changed files with 161 additions and 61 deletions

View File

@@ -56,6 +56,7 @@ const hiddenSettingsDefault = {
blockingProfiles: '11111/#F00 11010/#C0F 11001/#00F 00001',
cacheStorageAPI: 'unset',
cacheStorageCompression: true,
cacheStorageCompressionThreshold: 65536,
cacheStorageMultithread: 2,
cacheControlForFirefox1376932: 'no-cache, no-store, must-revalidate',
cloudStorageCompression: true,

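For context, a hedged sketch of what the new cacheStorageCompressionThreshold hidden setting governs, mirroring the compress() change in the cache storage module below. The settings object and storeEntry() are illustrative stand-ins, not the extension's actual API:

    // Illustrative only: entries below the threshold are serialized as-is,
    // larger ones opt into compression and a worker thread.
    const settings = {
        cacheStorageCompression: true,
        cacheStorageCompressionThreshold: 65536,
        cacheStorageMultithread: 2,
    };

    async function storeEntry(serializeAsync, key, data) {
        const isLarge = typeof data === 'string' &&
            data.length >= settings.cacheStorageCompressionThreshold;
        const serialized = await serializeAsync(data, {
            compress: isLarge && settings.cacheStorageCompression,
            multithreaded: isLarge && settings.cacheStorageMultithread || 0,
        });
        return { key, data: serialized };
    }
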
View File

@@ -58,6 +58,19 @@ const shouldCache = bin => {
return out;
};
const missingKeys = (wanted, inbin, outbin) => {
inbin = inbin || {};
const found = Object.keys(inbin);
Object.assign(outbin, inbin);
if ( found.length === wanted.length ) { return; }
const missing = [];
for ( const key of wanted ) {
if ( outbin.hasOwnProperty(key) ) { continue; }
missing.push(key);
}
return missing;
};
/*******************************************************************************
*
* Extension storage
@@ -68,11 +81,10 @@ const shouldCache = bin => {
const cacheStorage = (( ) => {
const LARGE = 65536;
const compress = async (key, data) => {
const isLarge = typeof data === 'string' && data.length >= LARGE;
const µbhs = µb.hiddenSettings;
const isLarge = typeof data === 'string' &&
data.length >= µbhs.cacheStorageCompressionThreshold;
const after = await scuo.serializeAsync(data, {
compress: isLarge && µbhs.cacheStorageCompression,
multithreaded: isLarge && µbhs.cacheStorageMultithread || 0,
@@ -80,40 +92,39 @@ const cacheStorage = (( ) => {
return { key, data: after };
};
const decompress = async (key, data) => {
if ( scuo.canDeserialize(data) === false ) {
return { key, data };
}
const isLarge = data.length >= LARGE;
const after = await scuo.deserializeAsync(data, {
multithreaded: isLarge && µb.hiddenSettings.cacheStorageMultithread || 0,
const decompress = async (bin, key) => {
const data = bin[key];
if ( scuo.isSerialized(data) === false ) { return; }
const µbhs = µb.hiddenSettings;
const isLarge = data.length >= µbhs.cacheStorageCompressionThreshold;
bin[key] = await scuo.deserializeAsync(data, {
multithreaded: isLarge && µbhs.cacheStorageMultithread || 0,
});
return { key, data: after };
};
return {
name: 'browser.storage.local',
get(arg) {
const keys = arg;
return cacheAPI.get(keysFromGetArg(arg)).then(bin => {
if ( bin !== undefined ) { return bin; }
return extensionStorage.get(keys).catch(reason => {
ubolog(reason);
get(argbin) {
const outbin = {};
const wanted0 = keysFromGetArg(argbin);
return cacheAPI.get(wanted0).then(bin => {
const wanted1 = missingKeys(wanted0, bin, outbin);
if ( wanted1 === undefined ) { return; }
return extensionStorage.get(wanted1).then(bin => {
const wanted2 = missingKeys(wanted1, bin, outbin);
if ( wanted2 === undefined ) { return; }
if ( argbin instanceof Object === false ) { return; }
if ( Array.isArray(argbin) ) { return; }
for ( const key of wanted2 ) {
if ( argbin.hasOwnProperty(key) === false ) { continue; }
outbin[key] = argbin[key];
}
});
}).then(bin => {
if ( bin instanceof Object === false ) { return bin; }
}).then(( ) => {
const promises = [];
for ( const key of Object.keys(bin) ) {
promises.push(decompress(key, bin[key]));
for ( const key of Object.keys(outbin) ) {
promises.push(decompress(outbin, key));
}
return Promise.all(promises);
}).then(results => {
const bin = {};
for ( const { key, data } of results ) {
bin[key] = data;
}
return bin;
return Promise.all(promises).then(( ) => outbin);
}).catch(reason => {
ubolog(reason);
});
@@ -183,8 +194,6 @@ const cacheStorage = (( ) => {
idbStorage.clear();
return Promise.all(toMigrate);
},
error: undefined
};
})();
@@ -217,6 +226,7 @@ const cacheAPI = (( ) => {
}
resolve(caches.open(STORAGE_NAME).catch(reason => {
ubolog(reason);
return null;
}));
});
@@ -232,7 +242,7 @@ const cacheAPI = (( ) => {
const cache = await cacheStoragePromise;
if ( cache === null ) { return; }
return cache.match(keyToURL(key)).then(response => {
if ( response instanceof Response === false ) { return; }
if ( response === undefined ) { return; }
return response.text();
}).then(text => {
if ( text === undefined ) { return; }
@@ -302,7 +312,7 @@ const cacheAPI = (( ) => {
}
const responses = await Promise.all(toFetch);
for ( const response of responses ) {
if ( response instanceof Object === false ) { continue; }
if ( response === undefined ) { continue; }
const { key, text } = response;
if ( typeof key !== 'string' ) { continue; }
if ( typeof text !== 'string' ) { continue; }
@@ -321,7 +331,7 @@ const cacheAPI = (( ) => {
).catch(( ) => []);
},
async set(keyvalStore) {
set(keyvalStore) {
const keys = Object.keys(keyvalStore);
if ( keys.length === 0 ) { return; }
const promises = [];
@@ -331,7 +341,7 @@ const cacheAPI = (( ) => {
return Promise.all(promises);
},
async remove(keys) {
remove(keys) {
const toRemove = [];
if ( typeof keys === 'string' ) {
toRemove.push(removeOne(keys));
@@ -343,7 +353,7 @@ const cacheAPI = (( ) => {
return Promise.all(toRemove);
},
async clear() {
clear() {
return globalThis.caches.delete(STORAGE_NAME).catch(reason => {
ubolog(reason);
});

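To recap the reworked get() above, a hedged sketch of the layered lookup; layeredGet(), primary, secondary and defaults are hypothetical names standing in for cacheStorage.get(), cacheAPI, extensionStorage and the caller-supplied default bin. Keys are served from the Cache API when present, then from extension storage, then from the defaults, with only the still-missing keys forwarded to each fallback:

    // Merge whatever a storage layer returned into outbin and report
    // which of the wanted keys are still unresolved (undefined if none).
    const missingKeys = (wanted, inbin, outbin) => {
        Object.assign(outbin, inbin || {});
        const missing = wanted.filter(key =>
            Object.prototype.hasOwnProperty.call(outbin, key) === false
        );
        return missing.length !== 0 ? missing : undefined;
    };

    async function layeredGet(primary, secondary, defaults, wanted) {
        const outbin = {};
        const afterPrimary = missingKeys(wanted, await primary.get(wanted), outbin);
        if ( afterPrimary === undefined ) { return outbin; }
        const afterSecondary = missingKeys(
            afterPrimary, await secondary.get(afterPrimary), outbin
        );
        if ( afterSecondary === undefined ) { return outbin; }
        for ( const key of afterSecondary ) {
            if ( Object.prototype.hasOwnProperty.call(defaults, key) ) {
                outbin[key] = defaults[key];
            }
        }
        return outbin;
    }
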
View File

@@ -948,7 +948,7 @@ const onMessage = function(request, sender, callback) {
case 'cloudPull':
request.decode = encoded => {
if ( scuo.canDeserialize(encoded) ) {
if ( scuo.isSerialized(encoded) ) {
return scuo.deserializeAsync(encoded, { thread: true });
}
// Legacy decoding: needs to be kept around for the foreseeable future.

View File

@@ -1096,10 +1096,13 @@ export const deserialize = s => {
return data;
};
export const canDeserialize = s =>
export const isSerialized = s =>
typeof s === 'string' &&
(s.startsWith(MAGICLZ4PREFIX) || s.startsWith(MAGICPREFIX));
export const isCompressed = s =>
typeof s === 'string' && s.startsWith(MAGICLZ4PREFIX);
/*******************************************************************************
*
* Configuration
@@ -1137,10 +1140,78 @@ export const setConfig = config => {
*
* */
const THREAD_AREYOUREADY = 1;
const THREAD_IAMREADY = 2;
const THREAD_SERIALIZE = 3;
const THREAD_DESERIALIZE = 4;
class MainThread {
constructor() {
this.jobs = [];
this.workload = 0;
this.timer = undefined;
this.busy = false;
}
process() {
if ( this.jobs.length === 0 ) { return; }
const job = this.jobs.shift();
this.workload -= job.size;
const result = job.what === THREAD_SERIALIZE
? serialize(job.data, job.options)
: deserialize(job.data);
job.resolve(result);
this.processAsync();
if ( this.jobs.length === 0 ) {
this.busy = false;
}
}
processAsync() {
if ( this.timer !== undefined ) { return; }
if ( this.jobs.length === 0 ) { return; }
this.timer = globalThis.requestIdleCallback(deadline => {
this.timer = undefined;
globalThis.queueMicrotask(( ) => {
this.timer = undefined;
this.process();
});
this.busy = deadline.timeRemaining() === 0;
}, { timeout: 7 });
}
serialize(data, options) {
return new Promise(resolve => {
this.workload += 1;
this.jobs.push({ what: THREAD_SERIALIZE, data, options, size: 1, resolve });
this.processAsync();
});
}
deserialize(data, options) {
return new Promise(resolve => {
const size = data.length;
this.workload += size;
this.jobs.push({ what: THREAD_DESERIALIZE, data, options, size, resolve });
this.processAsync();
});
}
get queueSize() {
return this.jobs.length + 1;
}
get workSize() {
return this.busy ? Number.MAX_SAFE_INTEGER : this.workload * 2;
}
}
class Thread {
constructor(gcer) {
this.jobs = new Map();
this.jobIdGenerator = 1;
this.workload = 0;
this.workerAccessTime = 0;
this.workerTimer = undefined;
this.gcer = gcer;
@@ -1151,7 +1222,7 @@ class Thread {
worker.onmessage = ev => {
const msg = ev.data;
if ( isInstanceOf(msg, 'Object') === false ) { return; }
if ( msg.what === 'ready!' ) {
if ( msg.what === THREAD_IAMREADY ) {
worker.onmessage = ev => { this.onmessage(ev); };
worker.onerror = null;
resolve(worker);
@@ -1161,7 +1232,10 @@ class Thread {
worker.onmessage = worker.onerror = null;
resolve(null);
};
worker.postMessage({ what: 'ready?', config: currentConfig });
worker.postMessage({
what: THREAD_AREYOUREADY,
config: currentConfig,
});
} catch(ex) {
console.info(ex);
worker.onmessage = worker.onerror = null;
@@ -1194,6 +1268,7 @@ class Thread {
if ( resolve === undefined ) { return; }
this.jobs.delete(job.id);
resolve(job.result);
this.workload -= job.size;
if ( this.jobs.size !== 0 ) { return; }
this.countdownWorker();
}
@@ -1208,7 +1283,8 @@ class Thread {
}
const id = this.jobIdGenerator++;
return new Promise(resolve => {
const job = { what: 'serialize', id, data, options };
this.workload += 1;
const job = { what: THREAD_SERIALIZE, id, data, options, size: 1 };
this.jobs.set(job.id, resolve);
worker.postMessage(job);
});
@@ -1224,25 +1300,36 @@ class Thread {
}
const id = this.jobIdGenerator++;
return new Promise(resolve => {
const job = { what: 'deserialize', id, data, options };
const size = data.length;
this.workload += size;
const job = { what: THREAD_DESERIALIZE, id, data, options, size };
this.jobs.set(job.id, resolve);
worker.postMessage(job);
});
}
get queueSize() {
return this.jobs.size;
}
get workSize() {
return this.workload;
}
}
const threads = {
pool: [],
pool: [ new MainThread() ],
thread(maxPoolSize) {
for ( const thread of this.pool ) {
if ( thread.jobs.size === 0 ) { return thread; }
}
const len = this.pool.length;
if ( len !== 0 && len >= maxPoolSize ) {
if ( len === 1 ) { return this.pool[0]; }
return this.pool.reduce((best, candidate) =>
candidate.jobs.size < best.jobs.size ? candidate : best
);
const poolSize = this.pool.length;
if ( poolSize !== 0 && poolSize >= maxPoolSize ) {
if ( poolSize === 1 ) { return this.pool[0]; }
return this.pool.reduce((best, candidate) => {
if ( candidate.queueSize === 0 ) { return candidate; }
if ( best.queueSize === 0 ) { return best; }
return candidate.workSize < best.workSize
? candidate
: best;
});
}
const thread = new Thread(thread => {
const pos = this.pool.indexOf(thread);
@@ -1267,6 +1354,7 @@ export async function serializeAsync(data, options = {}) {
}
export async function deserializeAsync(data, options = {}) {
if ( isSerialized(data) === false ) { return data; }
const maxThreadCount = options.multithreaded || 0;
if ( maxThreadCount === 0 ) {
return deserialize(data, options);
@@ -1288,16 +1376,17 @@ if ( isInstanceOf(globalThis, 'DedicatedWorkerGlobalScope') ) {
globalThis.onmessage = ev => {
const msg = ev.data;
switch ( msg.what ) {
case 'ready?':
case THREAD_AREYOUREADY:
setConfig(msg.config);
globalThis.postMessage({ what: 'ready!' });
globalThis.postMessage({ what: THREAD_IAMREADY });
break;
case 'serialize':
case 'deserialize': {
const result = msg.what === 'serialize'
? serialize(msg.data, msg.options)
: deserialize(msg.data);
globalThis.postMessage({ id: msg.id, result });
case THREAD_SERIALIZE:
const result = serialize(msg.data, msg.options);
globalThis.postMessage({ id: msg.id, size: msg.size, result });
break;
case THREAD_DESERIALIZE: {
const result = deserialize(msg.data);
globalThis.postMessage({ id: msg.id, size: msg.size, result });
break;
}
}
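A hedged sketch of the scheduling policy introduced above, with pickThread() and the pool members as simplified stand-ins for threads.thread(), MainThread and Thread: an idle member is picked outright, otherwise the member with the smallest pending workload wins. Because the main-thread fallback reports queueSize as jobs.length + 1 and an effectively infinite workSize while the last idle callback had no spare time, dedicated workers are preferred whenever they are available.

    // Assumed member shape: queueSize (pending jobs) and workSize
    // (pending bytes, or Number.MAX_SAFE_INTEGER when best avoided).
    function pickThread(pool) {
        for ( const thread of pool ) {
            if ( thread.queueSize === 0 ) { return thread; }
        }
        if ( pool.length === 1 ) { return pool[0]; }
        return pool.reduce((best, candidate) =>
            candidate.workSize < best.workSize ? candidate : best
        );
    }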