/*******************************************************************************
uBlock Origin - a browser extension to block requests.
Copyright (C) 2014-present Raymond Hill
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Home: https://github.com/gorhill/uBlock
*/
'use strict';
/******************************************************************************/
µBlock.assets = (function() {
/******************************************************************************/
const reIsExternalPath = /^(?:[a-z-]+):\/\//,
reIsUserAsset = /^user-/,
errorCantConnectTo = vAPI.i18n('errorCantConnectTo'),
noopfunc = function(){};
const api = {};
/******************************************************************************/
var observers = [];
api.addObserver = function(observer) {
if ( observers.indexOf(observer) === -1 ) {
observers.push(observer);
}
};
api.removeObserver = function(observer) {
var pos;
while ( (pos = observers.indexOf(observer)) !== -1 ) {
observers.splice(pos, 1);
}
};
var fireNotification = function(topic, details) {
var result, r;
for ( var i = 0; i < observers.length; i++ ) {
r = observers[i](topic, details);
if ( r !== undefined ) { result = r; }
}
return result;
};
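// For illustration only -- not part of this module. A hypothetical consumer
// could hook into the observer mechanism above like so (the topic string and
// handling shown are made up for the example):
//
//   µBlock.assets.addObserver(function(topic, details) {
//       if ( topic === 'example-topic' && details !== undefined ) {
//           // A non-undefined return value becomes fireNotification()'s
//           // result (the last such value wins).
//           return true;
//       }
//   });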
/******************************************************************************/
api.fetchText = function(url, onLoad, onError) {
const isExternal = reIsExternalPath.test(url);
let actualUrl = isExternal ? url : vAPI.getURL(url);
// https://github.com/gorhill/uBlock/issues/2592
// Force browser cache to be bypassed, but only for resources which have
// been fetched more than one hour ago.
if ( isExternal ) {
const queryValue = '_=' + Math.floor(Date.now() / 3600000);
if ( actualUrl.indexOf('?') === -1 ) {
actualUrl += '?';
} else {
actualUrl += '&';
}
actualUrl += queryValue;
}
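// For illustration: the query value above is the number of whole hours
// elapsed since the Unix epoch, e.g. Date.now() === 1550000000000 gives
// '?_=430555' (hypothetical timestamp). All fetches of the same external
// resource within the same hour thus share an identical URL and can still
// be served from the browser cache; once the hour rolls over, the changed
// query string forces a fresh fetch.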
if ( typeof onError !== 'function' ) {
onError = onLoad;
}
const timeoutAfter = µBlock.hiddenSettings.assetFetchTimeout * 1000 || 30000;
const xhr = new XMLHttpRequest();
let contentLoaded = 0;
let timeoutTimer;
const cleanup = function() {
xhr.removeEventListener('load', onLoadEvent);
xhr.removeEventListener('error', onErrorEvent);
xhr.removeEventListener('abort', onErrorEvent);
xhr.removeEventListener('progress', onProgressEvent);
if ( timeoutTimer !== undefined ) {
clearTimeout(timeoutTimer);
timeoutTimer = undefined;
}
};
// https://github.com/gorhill/uMatrix/issues/15
const onLoadEvent = function() {
cleanup();
// xhr for local files gives status 0, but actually succeeds
const details = {
url,
content: '',
statusCode: this.status || 200,
statusText: this.statusText || ''
};
if ( details.statusCode < 200 || details.statusCode >= 300 ) {
return onError.call(null, details);
}
// consider an empty result to be an error
if ( stringIsNotEmpty(this.responseText) === false ) {
return onError.call(null, details);
}
// We never download anything other than plain text: discard the response
// if it appears to be an HTML document, which can happen when a server
// serves some kind of error page.
const text = this.responseText.trim();
if ( text.startsWith('<') && text.endsWith('>') ) {
return onError.call(null, details);
}
details.content = this.responseText;
onLoad(details);
};
const onErrorEvent = function() {
cleanup();
µBlock.logger.writeOne({
realm: 'message',
type: 'error',
text: errorCantConnectTo.replace('{{msg}}', actualUrl)
});
onError({ url, content: '' });
};
const onTimeout = function() {
xhr.abort();
};
// https://github.com/gorhill/uBlock/issues/2526
// - Timeout only when there is no progress.
const onProgressEvent = function(ev) {
if ( ev.loaded === contentLoaded ) { return; }
contentLoaded = ev.loaded;
if ( timeoutTimer !== undefined ) {
clearTimeout(timeoutTimer);
}
timeoutTimer = vAPI.setTimeout(onTimeout, timeoutAfter);
};
// Be ready for thrown exceptions:
// I am pretty sure it used to work, but now using a URL such as
// `file:///` on Chromium 40 results in an exception being thrown.
try {
xhr.open('get', actualUrl, true);
xhr.addEventListener('load', onLoadEvent);
xhr.addEventListener('error', onErrorEvent);
xhr.addEventListener('abort', onErrorEvent);
xhr.addEventListener('progress', onProgressEvent);
xhr.responseType = 'text';
xhr.send();
timeoutTimer = vAPI.setTimeout(onTimeout, timeoutAfter);
} catch (e) {
onErrorEvent.call(xhr);
}
};
/******************************************************************************/
// https://github.com/gorhill/uBlock/issues/3331
// Support the seamless loading of sublists.
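// For illustration, a filter list may pull in sublists with directives such
// as these (file names are made up):
//
//   ! Title: Example filter list
//   !#include sublists/ads.txt
//   !#include sublists/privacy.txt
//
// Each !#include path is resolved relative to the including list, must not
// escape the root list's directory, and the fetched sublist content is
// spliced in at the position of its directive.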
api.fetchFilterList = function(mainlistURL, onLoad, onError) {
const content = [];
const pendingSublistURLs = new Set([ mainlistURL ]);
const loadedSublistURLs = new Set();
const toParsedURL = api.fetchFilterList.toParsedURL;
// https://github.com/NanoAdblocker/NanoCore/issues/239
// Anything under URL's root directory is allowed to be fetched. The
// URL of a sublist will always be relative to the URL of the parent
// list (instead of the URL of the root list).
const rootDirectoryURL = toParsedURL(mainlistURL);
if ( rootDirectoryURL !== undefined ) {
const pos = rootDirectoryURL.pathname.lastIndexOf('/');
if ( pos !== -1 ) {
rootDirectoryURL.pathname =
rootDirectoryURL.pathname.slice(0, pos + 1);
}
}
let errored = false;
const processIncludeDirectives = function(details) {
const reInclude = /^!#include +(\S+)/gm;
const out = [];
const content = details.content;
let lastIndex = 0;
for (;;) {
const match = reInclude.exec(content);
if ( match === null ) { break; }
if ( toParsedURL(match[1]) !== undefined ) { continue; }
if ( match[1].indexOf('..') !== -1 ) { continue; }
const subURL = toParsedURL(details.url);
subURL.pathname = subURL.pathname.replace(/[^/]+$/, match[1]);
if ( subURL.href.startsWith(rootDirectoryURL.href) === false ) {
continue;
}
if ( pendingSublistURLs.has(subURL.href) ) { continue; }
if ( loadedSublistURLs.has(subURL.href) ) { continue; }
pendingSublistURLs.add(subURL.href);
api.fetchText(subURL.href, onLocalLoadSuccess, onLocalLoadError);
out.push(content.slice(lastIndex, match.index).trim(), subURL.href);
lastIndex = reInclude.lastIndex;
}
out.push(lastIndex === 0 ? content : content.slice(lastIndex).trim());
return out;
};
const onLocalLoadSuccess = function(details) {
if ( errored ) { return; }
const isSublist = details.url !== mainlistURL;
pendingSublistURLs.delete(details.url);
loadedSublistURLs.add(details.url);
// https://github.com/uBlockOrigin/uBlock-issues/issues/329
// Insert fetched content at position of the related !#include directive
let slot = isSublist ? content.indexOf(details.url) : 0;
if ( isSublist ) {
content.splice(
slot,
1,
'! >>>>>>>> ' + details.url,
details.content.trim(),
'! <<<<<<<< ' + details.url
);
slot += 1;
} else {
content[0] = details.content.trim();
}
// Find and process !#include directives
if (
rootDirectoryURL !== undefined &&
rootDirectoryURL.pathname.length > 0
) {
const processed = processIncludeDirectives(details);
if ( processed.length > 1 ) {
content.splice(slot, 1, ...processed);
}
}
if ( pendingSublistURLs.size !== 0 ) { return; }
details.url = mainlistURL;
details.content = content.join('\n').trim();
onLoad(details);
};
// https://github.com/AdguardTeam/FiltersRegistry/issues/82
// Not checking for `errored` status was causing repeated notifications
// to the caller. This can happen when more than one out of multiple
// sublists can't be fetched.
const onLocalLoadError = function(details) {
if ( errored ) { return; }
errored = true;
details.url = mainlistURL;
details.content = '';
onError(details);
};
this.fetchText(mainlistURL, onLocalLoadSuccess, onLocalLoadError);
};
api.fetchFilterList.toParsedURL = function(url) {
try {
return new URL(url);
} catch (ex) {
}
};
/*******************************************************************************
The purpose of the asset source registry is to keep key detail information
about an asset:
- Where to load it from: this may consist of one or more URLs, either local
or remote.
- After how many days an asset should be deemed obsolete -- i.e. in need of
an update.
- The origin and type of an asset.
- The last time an asset was registered.
**/
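// For illustration, a registered source entry might look roughly like the
// following (the asset key and URLs are hypothetical, not actual entries
// from assets.json):
//
//   registerAssetSource('example-filters', {
//       content: 'filters',
//       contentURL: [
//           'assets/thirdparties/example-filters.txt',
//           'https://example.com/example-filters.txt'
//       ],
//       updateAfter: 5
//   });
//
// registerAssetSource() normalizes contentURL into an array and derives
// hasLocalURL/hasRemoteURL from it; updateAfter defaults to 5 (days).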
let assetSourceRegistryPromise,
assetSourceRegistry = Object.create(null);
const registerAssetSource = function(assetKey, dict) {
const entry = assetSourceRegistry[assetKey] || {};
for ( const prop in dict ) {
if ( dict.hasOwnProperty(prop) === false ) { continue; }
if ( dict[prop] === undefined ) {
delete entry[prop];
} else {
entry[prop] = dict[prop];
}
}
let contentURL = dict.contentURL;
if ( contentURL !== undefined ) {
if ( typeof contentURL === 'string' ) {
contentURL = entry.contentURL = [ contentURL ];
} else if ( Array.isArray(contentURL) === false ) {
contentURL = entry.contentURL = [];
}
let remoteURLCount = 0;
for ( let i = 0; i < contentURL.length; i++ ) {
if ( reIsExternalPath.test(contentURL[i]) ) {
remoteURLCount += 1;
}
}
entry.hasLocalURL = remoteURLCount !== contentURL.length;
entry.hasRemoteURL = remoteURLCount !== 0;
} else if ( entry.contentURL === undefined ) {
entry.contentURL = [];
}
if ( typeof entry.updateAfter !== 'number' ) {
entry.updateAfter = 5;
}
if ( entry.submitter ) {
entry.submitTime = Date.now(); // To detect stale entries
}
assetSourceRegistry[assetKey] = entry;
};
const unregisterAssetSource = function(assetKey) {
assetCacheRemove(assetKey);
delete assetSourceRegistry[assetKey];
};
const saveAssetSourceRegistry = (function() {
let timer;
const save = function() {
timer = undefined;
µBlock.cacheStorage.set({ assetSourceRegistry: assetSourceRegistry });
};
return function(lazily) {
if ( timer !== undefined ) {
clearTimeout(timer);
}
if ( lazily ) {
timer = vAPI.setTimeout(save, 500);
} else {
save();
}
};
})();
const updateAssetSourceRegistry = function(json, silent) {
let newDict;
try {
newDict = JSON.parse(json);
} catch (ex) {
}
if ( newDict instanceof Object === false ) { return; }
const oldDict = assetSourceRegistry;
// Remove obsolete entries (only those which were built-in).
for ( const assetKey in oldDict ) {
if (
newDict[assetKey] === undefined &&
oldDict[assetKey].submitter === undefined
) {
unregisterAssetSource(assetKey);
}
}
// Add/update existing entries. Notify of new asset sources.
for ( const assetKey in newDict ) {
if ( oldDict[assetKey] === undefined && !silent ) {
fireNotification(
'builtin-asset-source-added',
{ assetKey: assetKey, entry: newDict[assetKey] }
);
}
registerAssetSource(assetKey, newDict[assetKey]);
}
saveAssetSourceRegistry();
};
const getAssetSourceRegistry = function(callback) {
if ( assetSourceRegistryPromise === undefined ) {
assetSourceRegistryPromise = µBlock.cacheStorage.get(
'assetSourceRegistry'
).then(bin => {
if (
bin instanceof Object &&
bin.assetSourceRegistry instanceof Object
) {
assetSourceRegistry = bin.assetSourceRegistry;
return;
}
return new Promise(resolve => {
api.fetchText(
µBlock.assetsBootstrapLocation || 'assets/assets.json',
details => {
updateAssetSourceRegistry(details.content, true);
resolve();
}
);
});
});
}
assetSourceRegistryPromise.then(( ) => {
callback(assetSourceRegistry);
});
};
api.registerAssetSource = function(assetKey, details) {
getAssetSourceRegistry(function() {
registerAssetSource(assetKey, details);
saveAssetSourceRegistry(true);
});
};
api.unregisterAssetSource = function(assetKey) {
getAssetSourceRegistry(function() {
unregisterAssetSource(assetKey);
saveAssetSourceRegistry(true);
});
};
/*******************************************************************************
The purpose of the asset cache registry is to keep track of all assets
which have been persisted into the local cache.
**/
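// For illustration, a cache registry entry holds bookkeeping only -- the
// cached content itself is stored separately under its 'cache/' key. A
// hypothetical entry:
//
//   assetCacheRegistry['example-filters'] = {
//       writeTime: 1550000000000,
//       readTime: 1550000000000,
//       remoteURL: 'https://example.com/example-filters.txt'
//   };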
const assetCacheRegistryStartTime = Date.now();
let assetCacheRegistryPromise;
let assetCacheRegistry = {};
const getAssetCacheRegistry = function() {
if ( assetCacheRegistryPromise === undefined ) {
assetCacheRegistryPromise = µBlock.cacheStorage.get(
'assetCacheRegistry'
).then(bin => {
if (
bin instanceof Object &&
bin.assetCacheRegistry instanceof Object
) {
assetCacheRegistry = bin.assetCacheRegistry;
}
});
}
return assetCacheRegistryPromise.then(( ) => assetCacheRegistry);
};
const saveAssetCacheRegistry = (function() {
let timer;
const save = function() {
timer = undefined;
µBlock.cacheStorage.set({ assetCacheRegistry });
};
return function(lazily) {
if ( timer !== undefined ) { clearTimeout(timer); }
if ( lazily ) {
timer = vAPI.setTimeout(save, 500);
} else {
save();
}
};
})();
const assetCacheRead = function(assetKey, callback) {
const internalKey = 'cache/' + assetKey;
const reportBack = function(content) {
if ( content instanceof Blob ) { content = ''; }
let details = { assetKey: assetKey, content: content };
if ( content === '' ) { details.error = 'E_NOTFOUND'; }
callback(details);
};
const onAssetRead = function(bin) {
if (
bin instanceof Object === false ||
bin.hasOwnProperty(internalKey) === false
) {
return reportBack('');
}
let entry = assetCacheRegistry[assetKey];
if ( entry === undefined ) {
return reportBack('');
}
entry.readTime = Date.now();
saveAssetCacheRegistry(true);
reportBack(bin[internalKey]);
};
Promise.all([
getAssetCacheRegistry(),
µBlock.cacheStorage.get(internalKey),
]).then(results => {
onAssetRead(results[1]);
});
};
const assetCacheWrite = function(assetKey, details, callback) {
let internalKey = 'cache/' + assetKey;
let content = '';
if ( typeof details === 'string' ) {
content = details;
} else if ( details instanceof Object ) {
content = details.content || '';
}
if ( content === '' ) {
return assetCacheRemove(assetKey, callback);
}
const onReady = function() {
let entry = assetCacheRegistry[assetKey];
if ( entry === undefined ) {
entry = assetCacheRegistry[assetKey] = {};
}
entry.writeTime = entry.readTime = Date.now();
if ( details instanceof Object && typeof details.url === 'string' ) {
entry.remoteURL = details.url;
}
µBlock.cacheStorage.set({ assetCacheRegistry, [internalKey]: content });
const result = { assetKey, content };
if ( typeof callback === 'function' ) {
callback(result);
}
// https://github.com/uBlockOrigin/uBlock-issues/issues/248
fireNotification('after-asset-updated', result);
};
getAssetCacheRegistry().then(( ) => onReady());
};
const assetCacheRemove = function(pattern, callback) {
getAssetCacheRegistry().then(cacheDict => {
const removedEntries = [];
const removedContent = [];
for ( const assetKey in cacheDict ) {
if ( pattern instanceof RegExp && !pattern.test(assetKey) ) {
continue;
}
if ( typeof pattern === 'string' && assetKey !== pattern ) {
continue;
}
removedEntries.push(assetKey);
removedContent.push('cache/' + assetKey);
delete cacheDict[assetKey];
}
if ( removedContent.length !== 0 ) {
µBlock.cacheStorage.remove(removedContent);
µBlock.cacheStorage.set({ assetCacheRegistry });
}
if ( typeof callback === 'function' ) {
callback();
}
for ( let i = 0; i < removedEntries.length; i++ ) {
fireNotification(
'after-asset-updated',
{ assetKey: removedEntries[i] }
);
}
});
};
const assetCacheMarkAsDirty = function(pattern, exclude, callback) {
if ( typeof exclude === 'function' ) {
callback = exclude;
exclude = undefined;
}
getAssetCacheRegistry().then(cacheDict => {
let mustSave = false;
for ( const assetKey in cacheDict ) {
if ( pattern instanceof RegExp ) {
if ( pattern.test(assetKey) === false ) { continue; }
} else if ( typeof pattern === 'string' ) {
if ( assetKey !== pattern ) { continue; }
} else if ( Array.isArray(pattern) ) {
if ( pattern.indexOf(assetKey) === -1 ) { continue; }
}
if ( exclude instanceof RegExp ) {
if ( exclude.test(assetKey) ) { continue; }
} else if ( typeof exclude === 'string' ) {
if ( assetKey === exclude ) { continue; }
} else if ( Array.isArray(exclude) ) {
if ( exclude.indexOf(assetKey) !== -1 ) { continue; }
}
const cacheEntry = cacheDict[assetKey];
if ( !cacheEntry.writeTime ) { continue; }
cacheDict[assetKey].writeTime = 0;
mustSave = true;
}
if ( mustSave ) {
µBlock.cacheStorage.set({ assetCacheRegistry });
}
if ( typeof callback === 'function' ) {
callback();
}
});
};
/******************************************************************************/
const stringIsNotEmpty = function(s) {
return typeof s === 'string' && s !== '';
};
/*******************************************************************************
User assets are NOT persisted in the cache storage. User assets are
recognized by the asset key which always starts with 'user-'.
TODO(seamless migration):
Can remove instances of old user asset keys when I am confident all users
are using uBO v1.11 and beyond.
**/
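// For illustration: user assets bypass the cache registry entirely and are
// stored in vAPI.storage under the asset key itself. A hypothetical round
// trip through the two helpers below:
//
//   saveUserAsset('user-filters', '||example.com^', ( ) => {
//       readUserAsset('user-filters', details => {
//           // details => { assetKey: 'user-filters', content: '||example.com^' }
//       });
//   });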
const readUserAsset = function(assetKey, callback) {
const reportBack = function(content) {
callback({ assetKey, content });
};
vAPI.storage.get(assetKey, bin => {
const content =
bin instanceof Object && typeof bin[assetKey] === 'string'
? bin[assetKey]
: '';
return reportBack(content);
});
// Remove obsolete entry
// TODO: remove once everybody is well beyond 1.18.6
vAPI.storage.remove('assets/user/filters.txt');
};
const saveUserAsset = function(assetKey, content, callback) {
vAPI.storage.set({ [assetKey]: content }, ( ) => {
if ( callback instanceof Function ) {
callback({ assetKey, content });
}
});
};
/******************************************************************************/
api.get = function(assetKey, options, callback) {
if ( typeof options === 'function' ) {
callback = options;
options = {};
} else if ( typeof callback !== 'function' ) {
callback = noopfunc;
}
return new Promise(resolve => {
// start of executor
if ( assetKey === µBlock.userFiltersPath ) {
readUserAsset(assetKey, details => {
callback(details);
resolve(details);
});
return;
}
let assetDetails = {},
contentURLs,
contentURL;
const reportBack = function(content, err) {
const details = { assetKey: assetKey, content: content };
if ( err ) {
details.error = assetDetails.lastError = err;
} else {
assetDetails.lastError = undefined;
}
callback(details);
resolve(details);
};
const onContentNotLoaded = function() {
let isExternal;
while ( (contentURL = contentURLs.shift()) ) {
isExternal = reIsExternalPath.test(contentURL);
if ( isExternal === false || assetDetails.hasLocalURL !== true ) {
break;
}
}
if ( !contentURL ) {
return reportBack('', 'E_NOTFOUND');
}
if ( assetDetails.content === 'filters' ) {
api.fetchFilterList(contentURL, onContentLoaded, onContentNotLoaded);
} else {
api.fetchText(contentURL, onContentLoaded, onContentNotLoaded);
}
2014-07-20 21:00:26 +02:00
};
const onContentLoaded = function(details) {
if ( stringIsNotEmpty(details.content) === false ) {
onContentNotLoaded();
return;
}
if ( reIsExternalPath.test(contentURL) && options.dontCache !== true ) {
assetCacheWrite(assetKey, {
content: details.content,
url: contentURL
});
}
reportBack(details.content);
};
const onCachedContentLoaded = function(details) {
if ( details.content !== '' ) {
return reportBack(details.content);
}
getAssetSourceRegistry(function(registry) {
assetDetails = registry[assetKey] || {};
if ( typeof assetDetails.contentURL === 'string' ) {
contentURLs = [ assetDetails.contentURL ];
} else if ( Array.isArray(assetDetails.contentURL) ) {
contentURLs = assetDetails.contentURL.slice(0);
} else {
contentURLs = [];
}
onContentNotLoaded();
});
};
assetCacheRead(assetKey, onCachedContentLoaded);
// end of executor
});
};
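// For illustration, a typical call (asset key shown is hypothetical). The
// cached copy is served when present; otherwise the content URLs listed in
// the source registry are tried in turn:
//
//   µBlock.assets.get('example-filters', { dontCache: false }, details => {
//       if ( details.error !== undefined ) { return; } // e.g. 'E_NOTFOUND'
//       // details.content holds the asset's text
//   });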
/******************************************************************************/
const getRemote = function(assetKey, callback) {
var assetDetails = {},
contentURLs,
contentURL;
var reportBack = function(content, err) {
var details = { assetKey: assetKey, content: content };
if ( err ) {
details.error = assetDetails.lastError = err;
} else {
assetDetails.lastError = undefined;
}
callback(details);
};
var onRemoteContentLoaded = function(details) {
if ( stringIsNotEmpty(details.content) === false ) {
registerAssetSource(assetKey, { error: { time: Date.now(), error: 'No content' } });
tryLoading();
return;
}
assetCacheWrite(assetKey, {
content: details.content,
url: contentURL
});
registerAssetSource(assetKey, { error: undefined });
reportBack(details.content);
};
var onRemoteContentError = function(details) {
var text = details.statusText;
if ( details.statusCode === 0 ) {
text = 'network error';
}
registerAssetSource(assetKey, { error: { time: Date.now(), error: text } });
tryLoading();
};
var tryLoading = function() {
while ( (contentURL = contentURLs.shift()) ) {
if ( reIsExternalPath.test(contentURL) ) { break; }
}
if ( !contentURL ) {
return reportBack('', 'E_NOTFOUND');
}
if ( assetDetails.content === 'filters' ) {
api.fetchFilterList(contentURL, onRemoteContentLoaded, onRemoteContentError);
} else {
api.fetchText(contentURL, onRemoteContentLoaded, onRemoteContentError);
}
};
getAssetSourceRegistry(function(registry) {
assetDetails = registry[assetKey] || {};
if ( typeof assetDetails.contentURL === 'string' ) {
contentURLs = [ assetDetails.contentURL ];
} else if ( Array.isArray(assetDetails.contentURL) ) {
contentURLs = assetDetails.contentURL.slice(0);
} else {
contentURLs = [];
}
tryLoading();
});
};
/******************************************************************************/
api.put = function(assetKey, content, callback) {
return new Promise(resolve => {
const onDone = function(details) {
if ( typeof callback === 'function' ) {
callback(details);
}
resolve(details);
};
if ( reIsUserAsset.test(assetKey) ) {
saveUserAsset(assetKey, content, onDone);
} else {
assetCacheWrite(assetKey, content, onDone);
}
});
};
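// Illustrative only, not part of the module's logic: api.put() honors an
// optional callback and also returns a Promise, so either style works.
// The asset key and filter text below are hypothetical examples.
//
//     µBlock.assets.put('user-filters', 'example.com##.ad').then(details => {
//         console.log('stored', details);
//     });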
/******************************************************************************/
api.metadata = function(callback) {
var assetRegistryReady = false,
cacheRegistryReady = false;
var onReady = function() {
var assetDict = JSON.parse(JSON.stringify(assetSourceRegistry)),
cacheDict = assetCacheRegistry,
assetEntry, cacheEntry,
now = Date.now(), obsoleteAfter;
for ( var assetKey in assetDict ) {
assetEntry = assetDict[assetKey];
cacheEntry = cacheDict[assetKey];
if ( cacheEntry ) {
assetEntry.cached = true;
assetEntry.writeTime = cacheEntry.writeTime;
obsoleteAfter = cacheEntry.writeTime + assetEntry.updateAfter * 86400000;
assetEntry.obsolete = obsoleteAfter < now;
assetEntry.remoteURL = cacheEntry.remoteURL;
} else if (
assetEntry.contentURL &&
assetEntry.contentURL.length !== 0
) {
assetEntry.writeTime = 0;
obsoleteAfter = 0;
assetEntry.obsolete = true;
}
}
callback(assetDict);
};
getAssetSourceRegistry(function() {
assetRegistryReady = true;
if ( cacheRegistryReady ) { onReady(); }
});
getAssetCacheRegistry().then(( ) => {
cacheRegistryReady = true;
if ( assetRegistryReady ) { onReady(); }
});
};
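// Illustrative only: one way a caller might consume the dictionary built
// above, e.g. to collect the keys of assets whose cached copy is older
// than their updateAfter period.
//
//     µBlock.assets.metadata(dict => {
//         const obsolete = Object.keys(dict)
//             .filter(assetKey => dict[assetKey].obsolete === true);
//         console.log('obsolete assets:', obsolete);
//     });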
/******************************************************************************/
api.purge = assetCacheMarkAsDirty;
api.remove = function(pattern, callback) {
assetCacheRemove(pattern, callback);
};
api.rmrf = function() {
assetCacheRemove(/./);
};
/******************************************************************************/
// Asset updater area.
var updaterStatus,
updaterTimer,
updaterAssetDelayDefault = 120000,
updaterAssetDelay = updaterAssetDelayDefault,
updaterUpdated = [],
updaterFetched = new Set(),
noRemoteResources;
var updateFirst = function() {
// https://github.com/gorhill/uBlock/commit/126110c9a0a0630cd556f5cb215422296a961029
// Firefox extension reviewers do not want uBO/webext to fetch its own
// scriptlets/resources asset from the project's own repo (github.com).
// https://github.com/uBlockOrigin/uAssets/issues/1647#issuecomment-371456830
// Allow self-hosted dev build to update: if update_url is present but
// null, assume the extension is hosted on AMO.
if ( noRemoteResources === undefined ) {
noRemoteResources =
vAPI.webextFlavor.soup.has('firefox') &&
vAPI.webextFlavor.soup.has('webext') &&
vAPI.webextFlavor.soup.has('devbuild') === false;
}
updaterStatus = 'updating';
updaterFetched.clear();
updaterUpdated = [];
fireNotification('before-assets-updated');
updateNext();
};
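// Update assets one at a time: findOne() picks the next remote asset whose
// cached copy is older than its updateAfter period (expressed in days) and
// which an observer agrees to update; getRemote() fetches it; updatedOne()
// records the result (or fires 'asset-update-failed') and schedules the
// next pass after updaterAssetDelay ms. Stale assets which no observer
// claims and which have not been read since the cache registry was loaded
// are garbage-collected from the cache along the way.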
var updateNext = function() {
var assetDict, cacheDict;
    // Remove a cached asset which has not been read since the cache
    // registry was loaded, i.e. which is no longer in use.
var garbageCollectOne = function(assetKey) {
var cacheEntry = cacheDict[assetKey];
if ( cacheEntry && cacheEntry.readTime < assetCacheRegistryStartTime ) {
assetCacheRemove(assetKey);
}
};
var findOne = function() {
var now = Date.now(),
assetEntry, cacheEntry;
for ( var assetKey in assetDict ) {
assetEntry = assetDict[assetKey];
if ( assetEntry.hasRemoteURL !== true ) { continue; }
if ( updaterFetched.has(assetKey) ) { continue; }
cacheEntry = cacheDict[assetKey];
if ( cacheEntry && (cacheEntry.writeTime + assetEntry.updateAfter * 86400000) > now ) {
continue;
}
// Update of user scripts/resources forbidden?
if ( assetKey === 'ublock-resources' && noRemoteResources ) {
continue;
}
if (
fireNotification(
'before-asset-updated',
{ assetKey: assetKey, type: assetEntry.content }
) === true
) {
return assetKey;
}
garbageCollectOne(assetKey);
}
};
var updatedOne = function(details) {
if ( details.content !== '' ) {
updaterUpdated.push(details.assetKey);
if ( details.assetKey === 'assets.json' ) {
updateAssetSourceRegistry(details.content);
}
} else {
fireNotification('asset-update-failed', { assetKey: details.assetKey });
}
if ( findOne() !== undefined ) {
vAPI.setTimeout(updateNext, updaterAssetDelay);
} else {
updateDone();
}
};
var updateOne = function() {
var assetKey = findOne();
if ( assetKey === undefined ) {
return updateDone();
}
updaterFetched.add(assetKey);
getRemote(assetKey, updatedOne);
};
getAssetSourceRegistry(function(dict) {
assetDict = dict;
if ( !cacheDict ) { return; }
updateOne();
});
getAssetCacheRegistry().then(dict => {
cacheDict = dict;
if ( !assetDict ) { return; }
updateOne();
});
};
var updateDone = function() {
var assetKeys = updaterUpdated.slice(0);
updaterFetched.clear();
updaterUpdated = [];
updaterStatus = undefined;
updaterAssetDelay = updaterAssetDelayDefault;
fireNotification('after-assets-updated', { assetKeys: assetKeys });
};
api.updateStart = function(details) {
var oldUpdateDelay = updaterAssetDelay,
newUpdateDelay = typeof details.delay === 'number' ?
details.delay :
updaterAssetDelayDefault;
updaterAssetDelay = Math.min(oldUpdateDelay, newUpdateDelay);
if ( updaterStatus !== undefined ) {
if ( newUpdateDelay < oldUpdateDelay ) {
clearTimeout(updaterTimer);
updaterTimer = vAPI.setTimeout(updateNext, updaterAssetDelay);
}
return;
}
updateFirst();
};
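// Illustrative only: a caller may shorten the inter-asset delay of an
// ongoing update cycle; the smaller of the current and requested delays
// wins. The delay value below is a hypothetical example.
//
//     µBlock.assets.updateStart({ delay: 7000 });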
api.updateStop = function() {
if ( updaterTimer ) {
clearTimeout(updaterTimer);
updaterTimer = undefined;
}
if ( updaterStatus !== undefined ) {
updateDone();
}
};
/******************************************************************************/
return api;
/******************************************************************************/
})();
/******************************************************************************/