/*******************************************************************************
uBlock Origin - a browser extension to block requests.
Copyright (C) 2014-present Raymond Hill
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Home: https://github.com/gorhill/uBlock
*/
'use strict';
/******************************************************************************/
µBlock.assets = (( ) => {
/******************************************************************************/
const reIsExternalPath = /^(?:[a-z-]+):\/\//;
const reIsUserAsset = /^user-/;
const errorCantConnectTo = vAPI.i18n('errorCantConnectTo');
const api = {};
// A hint for various pieces of code to take measures if possible to save
// bandwidth of remote servers.
let remoteServerFriendly = false;
/******************************************************************************/
const observers = [];
api.addObserver = function(observer) {
if ( observers.indexOf(observer) === -1 ) {
observers.push(observer);
}
};
api.removeObserver = function(observer) {
let pos;
while ( (pos = observers.indexOf(observer)) !== -1 ) {
observers.splice(pos, 1);
}
};
const fireNotification = function(topic, details) {
let result;
for ( const observer of observers ) {
const r = observer(topic, details);
if ( r !== undefined ) { result = r; }
}
return result;
};
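// A hedged usage sketch of the observer mechanism above (observer name is
// illustrative): an observer is a function receiving a topic string and an
// optional details object, and may return a value to influence the caller —
// for instance, returning `true` in response to 'before-asset-updated' tells
// the updater the asset may be fetched.
//
//   const myObserver = (topic, details) => {
//       if ( topic === 'after-asset-updated' ) {
//           console.log('Updated:', details.assetKey);
//       }
//   };
//   µBlock.assets.addObserver(myObserver);
//   µBlock.assets.removeObserver(myObserver);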
/******************************************************************************/
api.fetch = function(url, options = {}) {
return new Promise((resolve, reject) => {
// Start of executor
const timeoutAfter = µBlock.hiddenSettings.assetFetchTimeout * 1000 || 30000;
const xhr = new XMLHttpRequest();
let contentLoaded = 0;
let timeoutTimer;
const cleanup = function() {
xhr.removeEventListener('load', onLoadEvent);
xhr.removeEventListener('error', onErrorEvent);
xhr.removeEventListener('abort', onErrorEvent);
xhr.removeEventListener('progress', onProgressEvent);
if ( timeoutTimer !== undefined ) {
clearTimeout(timeoutTimer);
timeoutTimer = undefined;
}
};
const fail = function(details, msg) {
µBlock.logger.writeOne({
realm: 'message',
type: 'error',
text: msg,
});
details.content = '';
details.error = msg;
reject(details);
};
// https://github.com/gorhill/uMatrix/issues/15
const onLoadEvent = function() {
cleanup();
// xhr for local files gives status 0, but actually succeeds
const details = {
url,
statusCode: this.status || 200,
statusText: this.statusText || ''
};
if ( details.statusCode < 200 || details.statusCode >= 300 ) {
return fail(details, `${url}: ${details.statusCode} ${details.statusText}`);
}
details.content = this.response;
resolve(details);
};
const onErrorEvent = function() {
cleanup();
fail({ url }, errorCantConnectTo.replace('{{msg}}', url));
};
const onTimeout = function() {
xhr.abort();
};
// https://github.com/gorhill/uBlock/issues/2526
// - Timeout only when there is no progress.
const onProgressEvent = function(ev) {
if ( ev.loaded === contentLoaded ) { return; }
contentLoaded = ev.loaded;
if ( timeoutTimer !== undefined ) {
clearTimeout(timeoutTimer);
}
timeoutTimer = vAPI.setTimeout(onTimeout, timeoutAfter);
};
// Be ready for thrown exceptions:
// I am pretty sure it used to work, but now using a URL such as
// `file:///` on Chromium 40 results in an exception being thrown.
try {
xhr.open('get', url, true);
xhr.addEventListener('load', onLoadEvent);
xhr.addEventListener('error', onErrorEvent);
xhr.addEventListener('abort', onErrorEvent);
xhr.addEventListener('progress', onProgressEvent);
xhr.responseType = options.responseType || 'text';
xhr.send();
timeoutTimer = vAPI.setTimeout(onTimeout, timeoutAfter);
} catch (e) {
onErrorEvent.call(xhr);
}
// End of executor
});
};
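// A minimal usage sketch (the URL below is hypothetical): the returned
// promise resolves with { url, statusCode, statusText, content } on success,
// and rejects with a similar details object carrying an `error` message on
// failure.
//
//   µBlock.assets.fetch('https://example.org/list.txt').then(details => {
//       console.log(details.statusCode, details.content.length);
//   }).catch(details => {
//       console.log(details.error);
//   });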
/******************************************************************************/
api.fetchText = async function(url) {
const isExternal = reIsExternalPath.test(url);
let actualUrl = isExternal ? url : vAPI.getURL(url);
// https://github.com/gorhill/uBlock/issues/2592
// Force browser cache to be bypassed, but only for resources which have
// been fetched more than one hour ago.
// https://github.com/uBlockOrigin/uBlock-issues/issues/682#issuecomment-515197130
// Provide filter list authors a way to completely bypass
// the browser cache.
// https://github.com/gorhill/uBlock/commit/048bfd251c9b#r37972005
// Use modulo prime numbers to avoid generating the same token at the
// same time across different days.
// Do not bypass browser cache if we are asked to be gentle on remote
// servers.
if ( isExternal && remoteServerFriendly !== true ) {
const cacheBypassToken =
µBlock.hiddenSettings.updateAssetBypassBrowserCache
? Math.floor(Date.now() / 1000) % 86413
: Math.floor(Date.now() / 3600000) % 13;
const queryValue = `_=${cacheBypassToken}`;
if ( actualUrl.indexOf('?') === -1 ) {
actualUrl += '?';
} else {
actualUrl += '&';
}
actualUrl += queryValue;
}
let details = { content: '' };
try {
details = await api.fetch(actualUrl);
// Consider an empty result to be an error
if ( stringIsNotEmpty(details.content) === false ) {
details.content = '';
}
// We never download anything other than plain text: discard the result
// if the response appears to be an HTML document, which could happen
// when the server serves some kind of error page, for example.
const text = details.content.trim();
if ( text.startsWith('<') && text.endsWith('>') ) {
details.content = '';
}
} catch(ex) {
details = ex;
}
// We want to return the caller's URL, not our internal one, which may
// differ from it.
details.url = url;
return details;
};
/******************************************************************************/
// https://github.com/gorhill/uBlock/issues/3331
// Support the seamless loading of sublists.
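// An illustrative example (hypothetical URLs): a parent list fetched from
//   https://example.org/lists/main.txt
// containing the directive
//   !#include sublist.txt
// causes the sublist to be fetched from
//   https://example.org/lists/sublist.txt
// and spliced into the parent content between `! >>>>>>>> ...` and
// `! <<<<<<<< ...` marker lines. Absolute URLs and paths containing `..`
// are ignored, as enforced below.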
api.fetchFilterList = async function(mainlistURL) {
const toParsedURL = url => {
try {
return new URL(url);
} catch (ex) {
}
};
// https://github.com/NanoAdblocker/NanoCore/issues/239
// Anything under URL's root directory is allowed to be fetched. The
// URL of a sublist will always be relative to the URL of the parent
// list (instead of the URL of the root list).
let rootDirectoryURL = toParsedURL(
reIsExternalPath.test(mainlistURL)
? mainlistURL
: vAPI.getURL(mainlistURL)
);
if ( rootDirectoryURL !== undefined ) {
const pos = rootDirectoryURL.pathname.lastIndexOf('/');
if ( pos !== -1 ) {
rootDirectoryURL.pathname =
rootDirectoryURL.pathname.slice(0, pos + 1);
} else {
rootDirectoryURL = undefined;
}
}
const sublistURLs = new Set();
const processIncludeDirectives = function(results) {
const out = [];
const reInclude = /^!#include +(\S+)/gm;
for ( const result of results ) {
if ( typeof result === 'string' ) {
out.push(result);
continue;
}
if ( result instanceof Object === false ) { continue; }
const content = result.content;
let lastIndex = 0;
for (;;) {
if ( rootDirectoryURL === undefined ) { break; }
const match = reInclude.exec(content);
if ( match === null ) { break; }
if ( toParsedURL(match[1]) !== undefined ) { continue; }
if ( match[1].indexOf('..') !== -1 ) { continue; }
// Compute nested list path relative to parent list path
const pos = result.url.lastIndexOf('/');
if ( pos === -1 ) { continue; }
const subURL = result.url.slice(0, pos + 1) + match[1];
if ( sublistURLs.has(subURL) ) { continue; }
sublistURLs.add(subURL);
out.push(
content.slice(lastIndex, match.index),
`! >>>>>>>> ${subURL}`,
api.fetchText(subURL),
`! <<<<<<<< ${subURL}`
);
lastIndex = reInclude.lastIndex;
}
out.push(lastIndex === 0 ? content : content.slice(lastIndex));
}
return out;
};
// https://github.com/AdguardTeam/FiltersRegistry/issues/82
// Not checking for `errored` status was causing repeated notifications
// to the caller. This can happen when more than one out of multiple
// sublists can't be fetched.
let allParts = [
this.fetchText(mainlistURL)
];
for (;;) {
allParts = processIncludeDirectives(await Promise.all(allParts));
if ( allParts.every(v => typeof v === 'string') ) { break; }
}
return {
url: mainlistURL,
content: allParts.length === 1
? allParts[0]
: allParts.map(s => s.trim()).filter(s => s !== '').join('\n') + '\n'
};
};
/*******************************************************************************
The purpose of the asset source registry is to keep key detail information
about an asset:
- Where to load it from: this may consist of one or more URLs, either local
or remote.
- After how many days an asset should be deemed obsolete -- i.e. in need of
an update.
- The origin and type of an asset.
- The last time an asset was registered.
**/
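// An illustrative registry entry, under the assumption that the asset key
// and URLs below are hypothetical stand-ins for real ones defined in
// assets/assets.json; the `hasLocalURL`/`hasRemoteURL` flags are derived
// from `contentURL` by registerAssetSource() below:
//
//   assetSourceRegistry['some-list'] = {
//       content: 'filters',
//       contentURL: [ 'https://example.org/some-list.txt' ],
//       cdnURLs: [ 'https://cdn.example.org/some-list.txt' ],
//       updateAfter: 5,
//   };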
let assetSourceRegistryPromise,
assetSourceRegistry = Object.create(null);
const getAssetSourceRegistry = function() {
if ( assetSourceRegistryPromise === undefined ) {
assetSourceRegistryPromise = µBlock.cacheStorage.get(
'assetSourceRegistry'
).then(bin => {
if (
bin instanceof Object &&
bin.assetSourceRegistry instanceof Object
) {
assetSourceRegistry = bin.assetSourceRegistry;
return assetSourceRegistry;
}
return api.fetchText(
µBlock.assetsBootstrapLocation || 'assets/assets.json'
).then(details => {
return details.content !== ''
? details
: api.fetchText('assets/assets.json');
}).then(details => {
updateAssetSourceRegistry(details.content, true);
return assetSourceRegistry;
});
});
}
return assetSourceRegistryPromise;
};
const registerAssetSource = function(assetKey, dict) {
const entry = assetSourceRegistry[assetKey] || {};
for ( const prop in dict ) {
if ( dict.hasOwnProperty(prop) === false ) { continue; }
if ( dict[prop] === undefined ) {
delete entry[prop];
} else {
entry[prop] = dict[prop];
}
}
let contentURL = dict.contentURL;
if ( contentURL !== undefined ) {
if ( typeof contentURL === 'string' ) {
contentURL = entry.contentURL = [ contentURL ];
} else if ( Array.isArray(contentURL) === false ) {
contentURL = entry.contentURL = [];
}
let remoteURLCount = 0;
for ( let i = 0; i < contentURL.length; i++ ) {
if ( reIsExternalPath.test(contentURL[i]) ) {
remoteURLCount += 1;
}
}
entry.hasLocalURL = remoteURLCount !== contentURL.length;
entry.hasRemoteURL = remoteURLCount !== 0;
} else if ( entry.contentURL === undefined ) {
entry.contentURL = [];
}
if ( typeof entry.updateAfter !== 'number' ) {
entry.updateAfter = 5;
}
if ( entry.submitter ) {
entry.submitTime = Date.now(); // To detect stale entries
}
assetSourceRegistry[assetKey] = entry;
};
const unregisterAssetSource = function(assetKey) {
assetCacheRemove(assetKey);
delete assetSourceRegistry[assetKey];
};
const saveAssetSourceRegistry = (( ) => {
let timer;
const save = function() {
timer = undefined;
µBlock.cacheStorage.set({ assetSourceRegistry });
};
return function(lazily) {
if ( timer !== undefined ) {
clearTimeout(timer);
}
if ( lazily ) {
timer = vAPI.setTimeout(save, 500);
} else {
save();
}
};
})();
const updateAssetSourceRegistry = function(json, silent) {
let newDict;
try {
newDict = JSON.parse(json);
} catch (ex) {
}
if ( newDict instanceof Object === false ) { return; }
const oldDict = assetSourceRegistry;
// Remove obsolete entries (only those which were built-in).
for ( const assetKey in oldDict ) {
if (
newDict[assetKey] === undefined &&
oldDict[assetKey].submitter === undefined
) {
unregisterAssetSource(assetKey);
}
}
// Add/update existing entries. Notify of new asset sources.
for ( const assetKey in newDict ) {
if ( oldDict[assetKey] === undefined && !silent ) {
fireNotification(
'builtin-asset-source-added',
{ assetKey: assetKey, entry: newDict[assetKey] }
);
}
registerAssetSource(assetKey, newDict[assetKey]);
}
saveAssetSourceRegistry();
};
api.registerAssetSource = async function(assetKey, details) {
await getAssetSourceRegistry();
registerAssetSource(assetKey, details);
saveAssetSourceRegistry(true);
};
api.unregisterAssetSource = async function(assetKey) {
await getAssetSourceRegistry();
unregisterAssetSource(assetKey);
saveAssetSourceRegistry(true);
};
/*******************************************************************************
The purpose of the asset cache registry is to keep track of all assets
which have been persisted into the local cache.
**/
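// An illustrative cache registry entry (hypothetical asset key and URL): the
// cached content itself is stored separately under `cache/<assetKey>`, while
// the registry keeps only bookkeeping details written by assetCacheWrite()
// and assetCacheRead() below:
//
//   assetCacheRegistry['some-list'] = {
//       writeTime: 1577836800000,
//       readTime: 1577836800000,
//       remoteURL: 'https://example.org/some-list.txt',
//   };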
const assetCacheRegistryStartTime = Date.now();
let assetCacheRegistryPromise;
let assetCacheRegistry = {};
const getAssetCacheRegistry = function() {
if ( assetCacheRegistryPromise === undefined ) {
assetCacheRegistryPromise = µBlock.cacheStorage.get(
'assetCacheRegistry'
).then(bin => {
if (
bin instanceof Object &&
bin.assetCacheRegistry instanceof Object
) {
assetCacheRegistry = bin.assetCacheRegistry;
}
return assetCacheRegistry;
});
}
return assetCacheRegistryPromise;
};
const saveAssetCacheRegistry = (( ) => {
let timer;
const save = function() {
timer = undefined;
µBlock.cacheStorage.set({ assetCacheRegistry });
};
return function(lazily) {
if ( timer !== undefined ) { clearTimeout(timer); }
if ( lazily ) {
timer = vAPI.setTimeout(save, 30000);
} else {
save();
}
};
})();
const assetCacheRead = async function(assetKey, updateReadTime = false) {
const internalKey = `cache/${assetKey}`;
const reportBack = function(content) {
if ( content instanceof Blob ) { content = ''; }
const details = { assetKey: assetKey, content: content };
if ( content === '' ) { details.error = 'ENOTFOUND'; }
return details;
};
const [ , bin ] = await Promise.all([
getAssetCacheRegistry(),
µBlock.cacheStorage.get(internalKey),
]);
if (
bin instanceof Object === false ||
bin.hasOwnProperty(internalKey) === false
) {
return reportBack('');
}
const entry = assetCacheRegistry[assetKey];
if ( entry === undefined ) {
return reportBack('');
}
entry.readTime = Date.now();
if ( updateReadTime ) {
saveAssetCacheRegistry(true);
}
return reportBack(bin[internalKey]);
};
const assetCacheWrite = async function(assetKey, details) {
let content = '';
if ( typeof details === 'string' ) {
content = details;
} else if ( details instanceof Object ) {
content = details.content || '';
}
if ( content === '' ) {
return assetCacheRemove(assetKey);
}
const cacheDict = await getAssetCacheRegistry();
let entry = cacheDict[assetKey];
if ( entry === undefined ) {
entry = cacheDict[assetKey] = {};
}
entry.writeTime = entry.readTime = Date.now();
if ( details instanceof Object && typeof details.url === 'string' ) {
entry.remoteURL = details.url;
}
µBlock.cacheStorage.set({
assetCacheRegistry,
[`cache/${assetKey}`]: content
});
const result = { assetKey, content };
// https://github.com/uBlockOrigin/uBlock-issues/issues/248
fireNotification('after-asset-updated', result);
return result;
};
const assetCacheRemove = async function(pattern) {
const cacheDict = await getAssetCacheRegistry();
const removedEntries = [];
const removedContent = [];
for ( const assetKey in cacheDict ) {
if ( pattern instanceof RegExp && !pattern.test(assetKey) ) {
continue;
}
if ( typeof pattern === 'string' && assetKey !== pattern ) {
continue;
}
removedEntries.push(assetKey);
removedContent.push('cache/' + assetKey);
delete cacheDict[assetKey];
}
if ( removedContent.length !== 0 ) {
µBlock.cacheStorage.remove(removedContent);
µBlock.cacheStorage.set({ assetCacheRegistry });
}
for ( let i = 0; i < removedEntries.length; i++ ) {
fireNotification(
'after-asset-updated',
{ assetKey: removedEntries[i] }
);
}
};
const assetCacheMarkAsDirty = async function(pattern, exclude) {
const cacheDict = await getAssetCacheRegistry();
let mustSave = false;
for ( const assetKey in cacheDict ) {
if ( pattern instanceof RegExp ) {
if ( pattern.test(assetKey) === false ) { continue; }
} else if ( typeof pattern === 'string' ) {
if ( assetKey !== pattern ) { continue; }
} else if ( Array.isArray(pattern) ) {
if ( pattern.indexOf(assetKey) === -1 ) { continue; }
}
if ( exclude instanceof RegExp ) {
if ( exclude.test(assetKey) ) { continue; }
} else if ( typeof exclude === 'string' ) {
if ( assetKey === exclude ) { continue; }
} else if ( Array.isArray(exclude) ) {
if ( exclude.indexOf(assetKey) !== -1 ) { continue; }
}
const cacheEntry = cacheDict[assetKey];
if ( !cacheEntry.writeTime ) { continue; }
cacheDict[assetKey].writeTime = 0;
mustSave = true;
}
if ( mustSave ) {
µBlock.cacheStorage.set({ assetCacheRegistry });
}
};
/******************************************************************************/
const stringIsNotEmpty = function(s) {
return typeof s === 'string' && s !== '';
};
/*******************************************************************************
User assets are NOT persisted in the cache storage. User assets are
recognized by the asset key which always starts with 'user-'.
TODO(seamless migration):
Can remove instances of old user asset keys when I am confident all users
are using uBO v1.11 and beyond.
**/
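// A hedged usage sketch, assuming µBlock.userFiltersPath is such a 'user-'
// key and using a hypothetical filter line: user assets round-trip through
// vAPI.storage (via api.put()/api.get() below) rather than the cache storage.
//
//   µBlock.assets.put(µBlock.userFiltersPath, '||example.com^\n').then(( ) =>
//       µBlock.assets.get(µBlock.userFiltersPath)
//   ).then(details => {
//       console.log(details.content);
//   });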
const readUserAsset = async function(assetKey) {
const bin = await vAPI.storage.get(assetKey);
const content =
bin instanceof Object && typeof bin[assetKey] === 'string'
? bin[assetKey]
: '';
// Remove obsolete entry
// TODO: remove once everybody is well beyond 1.18.6
vAPI.storage.remove('assets/user/filters.txt');
return { assetKey, content };
};
const saveUserAsset = function(assetKey, content) {
return vAPI.storage.set({ [assetKey]: content }).then(( ) => {
return { assetKey, content };
});
};
/******************************************************************************/
api.get = async function(assetKey, options = {}) {
if ( assetKey === µBlock.userFiltersPath ) {
return readUserAsset(assetKey);
}
let assetDetails = {};
const reportBack = (content, url = '', err = undefined) => {
const details = { assetKey, content };
if ( err !== undefined ) {
details.error = assetDetails.lastError = err;
} else {
assetDetails.lastError = undefined;
}
if ( options.needSourceURL ) {
if (
url === '' &&
assetCacheRegistry instanceof Object &&
assetCacheRegistry[assetKey] instanceof Object
) {
details.sourceURL = assetCacheRegistry[assetKey].remoteURL;
}
if ( reIsExternalPath.test(url) ) {
details.sourceURL = url;
}
}
return details;
};
// Skip read-time property for non-updatable assets: the property is
// completely unused for such assets and thus there is no point incurring
// storage write overhead at launch when reading compiled or selfie assets.
const updateReadTime = /^(?:compiled|selfie)\//.test(assetKey) === false;
const details = await assetCacheRead(assetKey, updateReadTime);
if ( details.content !== '' ) {
return reportBack(details.content);
}
const assetRegistry = await getAssetSourceRegistry();
assetDetails = assetRegistry[assetKey] || {};
let contentURLs = [];
if ( typeof assetDetails.contentURL === 'string' ) {
contentURLs = [ assetDetails.contentURL ];
} else if ( Array.isArray(assetDetails.contentURL) ) {
contentURLs = assetDetails.contentURL.slice(0);
}
for ( const contentURL of contentURLs ) {
if ( reIsExternalPath.test(contentURL) && assetDetails.hasLocalURL ) {
continue;
}
const details = assetDetails.content === 'filters'
? await api.fetchFilterList(contentURL)
: await api.fetchText(contentURL);
if ( details.content === '' ) { continue; }
if ( reIsExternalPath.test(contentURL) && options.dontCache !== true ) {
assetCacheWrite(assetKey, {
content: details.content,
url: contentURL,
});
}
return reportBack(details.content, contentURL);
}
return reportBack('', '', 'ENOTFOUND');
};
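// A minimal usage sketch (hypothetical asset key): api.get() serves from the
// local cache when possible, otherwise it falls back to the registered
// content URLs; the result always carries `assetKey` and `content`, plus
// `error` when nothing could be fetched, and `sourceURL` when requested.
//
//   µBlock.assets.get('some-list', { needSourceURL: true }).then(details => {
//       console.log(details.assetKey, details.sourceURL, details.content.length);
//   });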
/******************************************************************************/
const getRemote = async function(assetKey) {
const assetRegistry = await getAssetSourceRegistry();
const assetDetails = assetRegistry[assetKey] || {};
const reportBack = function(content, err) {
const details = { assetKey: assetKey, content: content };
if ( err ) {
details.error = assetDetails.lastError = err;
} else {
assetDetails.lastError = undefined;
}
return details;
};
let contentURLs = [];
if ( typeof assetDetails.contentURL === 'string' ) {
contentURLs = [ assetDetails.contentURL ];
} else if ( Array.isArray(assetDetails.contentURL) ) {
contentURLs = assetDetails.contentURL.slice(0);
}
// If asked to be gentle on remote servers, favour using dedicated CDN
// servers. If more than one CDN server is present, randomly shuffle the
// set of servers so as to spread the bandwidth burden.
if ( remoteServerFriendly && Array.isArray(assetDetails.cdnURLs) ) {
const cdnURLs = assetDetails.cdnURLs.slice();
for ( let i = 0, n = cdnURLs.length; i < n; i++ ) {
const j = Math.floor(Math.random() * n);
if ( j === i ) { continue; }
[ cdnURLs[j], cdnURLs[i] ] = [ cdnURLs[i], cdnURLs[j] ];
}
contentURLs.unshift(...cdnURLs);
}
for ( const contentURL of contentURLs ) {
if ( reIsExternalPath.test(contentURL) === false ) { continue; }
const result = assetDetails.content === 'filters'
? await api.fetchFilterList(contentURL)
: await api.fetchText(contentURL);
// https://www.reddit.com/r/uBlockOrigin/comments/hbpo86/
// Server sent a truncated list? If the new list is smaller than the
// currently cached one by more than 25%, assume a server error.
if (
stringIsNotEmpty(result.content) &&
typeof assetDetails.size === 'number' &&
assetDetails.size !== 0
) {
const loss = 1 - result.content.length / assetDetails.size;
if ( loss > 0.25 ) {
result.statusCode = 206;
result.statusText = 'Partial Content';
result.content = '';
}
}
// Failure
if ( stringIsNotEmpty(result.content) === false ) {
let error = result.statusText;
if ( result.statusCode === 0 ) {
error = 'network error';
}
registerAssetSource(assetKey, {
error: { time: Date.now(), error }
});
continue;
}
// Success
assetCacheWrite(assetKey, {
content: result.content,
url: contentURL
});
registerAssetSource(assetKey, {
error: undefined,
size: result.content.length
});
return reportBack(result.content);
}
return reportBack('', 'ENOTFOUND');
};
/******************************************************************************/
api.put = async function(assetKey, content) {
return reIsUserAsset.test(assetKey)
? await saveUserAsset(assetKey, content)
: await assetCacheWrite(assetKey, content);
};
/******************************************************************************/
api.metadata = async function() {
await Promise.all([
getAssetSourceRegistry(),
getAssetCacheRegistry(),
]);
const assetDict = JSON.parse(JSON.stringify(assetSourceRegistry));
const cacheDict = assetCacheRegistry;
const now = Date.now();
for ( const assetKey in assetDict ) {
const assetEntry = assetDict[assetKey];
const cacheEntry = cacheDict[assetKey];
if ( cacheEntry ) {
assetEntry.cached = true;
assetEntry.writeTime = cacheEntry.writeTime;
const obsoleteAfter =
cacheEntry.writeTime + assetEntry.updateAfter * 86400000;
assetEntry.obsolete = obsoleteAfter < now;
assetEntry.remoteURL = cacheEntry.remoteURL;
} else if (
assetEntry.contentURL &&
assetEntry.contentURL.length !== 0
) {
assetEntry.writeTime = 0;
assetEntry.obsolete = true;
}
}
return assetDict;
};
/******************************************************************************/
api.purge = assetCacheMarkAsDirty;
api.remove = function(pattern) {
return assetCacheRemove(pattern);
};
api.rmrf = function() {
return assetCacheRemove(/./);
};
/******************************************************************************/
// Asset updater area.
const updaterAssetDelayDefault = 120000;
const updaterUpdated = [];
const updaterFetched = new Set();
let updaterStatus;
let updaterTimer;
let updaterAssetDelay = updaterAssetDelayDefault;
let updaterAuto = false;
const updateFirst = function() {
updaterStatus = 'updating';
updaterFetched.clear();
updaterUpdated.length = 0;
fireNotification('before-assets-updated');
updateNext();
};
const updateNext = async function() {
const [ assetDict, cacheDict ] = await Promise.all([
getAssetSourceRegistry(),
getAssetCacheRegistry(),
]);
const now = Date.now();
let assetKeyToUpdate;
for ( const assetKey in assetDict ) {
const assetEntry = assetDict[assetKey];
if ( assetEntry.hasRemoteURL !== true ) { continue; }
if ( updaterFetched.has(assetKey) ) { continue; }
const cacheEntry = cacheDict[assetKey];
if (
(cacheEntry instanceof Object) &&
(cacheEntry.writeTime + assetEntry.updateAfter * 86400000) > now
) {
continue;
}
if (
fireNotification('before-asset-updated', {
assetKey,
type: assetEntry.content
}) === true
) {
assetKeyToUpdate = assetKey;
break;
}
// This will remove a cached asset when it's no longer in use.
if ( cacheEntry && cacheEntry.readTime < assetCacheRegistryStartTime ) {
assetCacheRemove(assetKey);
}
}
if ( assetKeyToUpdate === undefined ) {
return updateDone();
}
updaterFetched.add(assetKeyToUpdate);
// In auto-update context, be gentle on remote servers.
remoteServerFriendly = updaterAuto;
const result = await getRemote(assetKeyToUpdate);
remoteServerFriendly = false;
if ( result.content !== '' ) {
updaterUpdated.push(result.assetKey);
if ( result.assetKey === 'assets.json' ) {
updateAssetSourceRegistry(result.content);
}
} else {
fireNotification('asset-update-failed', { assetKey: result.assetKey });
}
vAPI.setTimeout(updateNext, updaterAssetDelay);
};
const updateDone = function() {
const assetKeys = updaterUpdated.slice(0);
updaterFetched.clear();
updaterUpdated.length = 0;
updaterStatus = undefined;
updaterAssetDelay = updaterAssetDelayDefault;
fireNotification('after-assets-updated', { assetKeys: assetKeys });
};
api.updateStart = function(details) {
const oldUpdateDelay = updaterAssetDelay;
const newUpdateDelay = typeof details.delay === 'number'
? details.delay
: updaterAssetDelayDefault;
updaterAssetDelay = Math.min(oldUpdateDelay, newUpdateDelay);
updaterAuto = details.auto === true;
if ( updaterStatus !== undefined ) {
if ( newUpdateDelay < oldUpdateDelay ) {
clearTimeout(updaterTimer);
updaterTimer = vAPI.setTimeout(updateNext, updaterAssetDelay);
}
return;
}
updateFirst();
};
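// A hedged usage sketch (the delay value is illustrative): callers start an
// update cycle with an optional per-asset delay in milliseconds and an
// `auto` flag signalling that the update was not user-initiated, in which
// case remote servers are treated gently — CDN URLs are favoured in
// getRemote() and the browser cache is not bypassed in fetchText().
//
//   µBlock.assets.updateStart({ delay: 2000, auto: true });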
api.updateStop = function() {
if ( updaterTimer ) {
clearTimeout(updaterTimer);
updaterTimer = undefined;
}
if ( updaterStatus !== undefined ) {
updateDone();
}
};
api.isUpdating = function() {
return updaterStatus === 'updating' &&
updaterAssetDelay <= µBlock.hiddenSettings.manualUpdateAssetFetchPeriod;
};
/******************************************************************************/
return api;
/******************************************************************************/
})();
/******************************************************************************/