Skip to content

Commit

Permalink
Add support for cloud storage compression
Browse files Browse the repository at this point in the history
Cloud storage is a limited resource, and thus it
makes sense to support data compression before
sending the data to cloud storage.

A new hidden setting makes it possible to toggle
cloud storage compression on:

name: cloudStorageCompression
default: false

By default, this hidden setting is `false`, and a
user must set it to `true` to enable compression
of cloud storage items.

This hidden setting will eventually be toggled
to `true` by default, once there is good confidence
that a majority of users are running a version of
uBO which can properly handle compressed cloud
storage items.

A cursory assessment shows that compressed items
are roughly 40-50% smaller.
  • Loading branch information
gorhill committed Aug 16, 2020
1 parent de6a9e3 commit d8b6b31
Show file tree
Hide file tree
Showing 7 changed files with 367 additions and 206 deletions.
66 changes: 37 additions & 29 deletions platform/chromium/vapi-background.js
Original file line number Diff line number Diff line change
Expand Up @@ -1540,10 +1540,10 @@ vAPI.cloud = (( ) => {
// good thing given chrome.storage.sync.MAX_WRITE_OPERATIONS_PER_MINUTE
// and chrome.storage.sync.MAX_WRITE_OPERATIONS_PER_HOUR.

const getCoarseChunkCount = async function(dataKey) {
const getCoarseChunkCount = async function(datakey) {
const keys = {};
for ( let i = 0; i < maxChunkCountPerItem; i += 16 ) {
keys[dataKey + i.toString()] = '';
keys[datakey + i.toString()] = '';
}
let bin;
try {
Expand All @@ -1553,13 +1553,13 @@ vAPI.cloud = (( ) => {
}
let chunkCount = 0;
for ( let i = 0; i < maxChunkCountPerItem; i += 16 ) {
if ( bin[dataKey + i.toString()] === '' ) { break; }
if ( bin[datakey + i.toString()] === '' ) { break; }
chunkCount = i + 16;
}
return chunkCount;
};

const deleteChunks = function(dataKey, start) {
const deleteChunks = function(datakey, start) {
const keys = [];

// No point in deleting more than:
Expand All @@ -1570,34 +1570,37 @@ vAPI.cloud = (( ) => {
Math.ceil(maxStorageSize / maxChunkSize)
);
for ( let i = start; i < n; i++ ) {
keys.push(dataKey + i.toString());
keys.push(datakey + i.toString());
}
if ( keys.length !== 0 ) {
webext.storage.sync.remove(keys);
}
};

const push = async function(dataKey, data) {
let bin = {
'source': options.deviceName || options.defaultDeviceName,
'tstamp': Date.now(),
'data': data,
'size': 0
const push = async function(details) {
const { datakey, data, encode } = details;
const item = {
source: options.deviceName || options.defaultDeviceName,
tstamp: Date.now(),
data,
};
bin.size = JSON.stringify(bin).length;
const item = JSON.stringify(bin);
const json = JSON.stringify(item);
const encoded = encode instanceof Function
? await encode(json)
: json;

// Chunkify taking into account QUOTA_BYTES_PER_ITEM:
// https://developer.chrome.com/extensions/storage#property-sync
// "The maximum size (in bytes) of each individual item in sync
// "storage, as measured by the JSON stringification of its value
// "plus its key length."
bin = {};
let chunkCount = Math.ceil(item.length / maxChunkSize);
const bin = {};
const chunkCount = Math.ceil(encoded.length / maxChunkSize);
for ( let i = 0; i < chunkCount; i++ ) {
bin[dataKey + i.toString()] = item.substr(i * maxChunkSize, maxChunkSize);
bin[datakey + i.toString()]
= encoded.substr(i * maxChunkSize, maxChunkSize);
}
bin[dataKey + chunkCount.toString()] = ''; // Sentinel
bin[datakey + chunkCount.toString()] = ''; // Sentinel

try {
await webext.storage.sync.set(bin);
Expand All @@ -1606,18 +1609,19 @@ vAPI.cloud = (( ) => {
}

// Remove potentially unused trailing chunks
deleteChunks(dataKey, chunkCount);
deleteChunks(datakey, chunkCount);
};

const pull = async function(dataKey) {
const pull = async function(details) {
const { datakey, decode } = details;

const result = await getCoarseChunkCount(dataKey);
const result = await getCoarseChunkCount(datakey);
if ( typeof result !== 'number' ) {
return result;
}
const chunkKeys = {};
for ( let i = 0; i < result; i++ ) {
chunkKeys[dataKey + i.toString()] = '';
chunkKeys[datakey + i.toString()] = '';
}

let bin;
Expand All @@ -1633,31 +1637,35 @@ vAPI.cloud = (( ) => {
// happen when the number of chunks is a multiple of
// chunkCountPerFetch. Hence why we must also test against
// undefined.
let json = [], jsonSlice;
let encoded = [];
let i = 0;
for (;;) {
jsonSlice = bin[dataKey + i.toString()];
if ( jsonSlice === '' || jsonSlice === undefined ) { break; }
json.push(jsonSlice);
const slice = bin[datakey + i.toString()];
if ( slice === '' || slice === undefined ) { break; }
encoded.push(slice);
i += 1;
}
encoded = encoded.join('');
const json = decode instanceof Function
? await decode(encoded)
: encoded;
let entry = null;
try {
entry = JSON.parse(json.join(''));
entry = JSON.parse(json);
} catch(ex) {
}
return entry;
};

const used = async function(dataKey) {
const used = async function(datakey) {
if ( webext.storage.sync.getBytesInUse instanceof Function === false ) {
return;
}
const coarseCount = await getCoarseChunkCount(dataKey);
const coarseCount = await getCoarseChunkCount(datakey);
if ( typeof coarseCount !== 'number' ) { return; }
const keys = [];
for ( let i = 0; i < coarseCount; i++ ) {
keys.push(`${dataKey}${i}`);
keys.push(`${datakey}${i}`);
}
let results;
try {
Expand Down
1 change: 1 addition & 0 deletions src/js/background.js
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ const µBlock = (( ) => { // jshint ignore:line
cacheStorageAPI: 'unset',
cacheStorageCompression: true,
cacheControlForFirefox1376932: 'no-cache, no-store, must-revalidate',
cloudStorageCompression: false,
cnameIgnoreList: 'unset',
cnameIgnore1stParty: true,
cnameIgnoreExceptions: true,
Expand Down
71 changes: 43 additions & 28 deletions src/js/cachestorage.js
Original file line number Diff line number Diff line change
Expand Up @@ -195,22 +195,48 @@
return dbPromise;
};

// Normalize a value read back from storage: Blob instances are expanded
// into a Uint8Array of their raw bytes, anything else passes through
// untouched. Always resolves asynchronously to keep a uniform contract.
const fromBlob = function(data) {
    if ( (data instanceof Blob) === false ) {
        return Promise.resolve(data);
    }
    return new Promise(resolve => {
        const reader = new FileReader();
        // onloadend fires for both success and failure; on success
        // reader.result holds the ArrayBuffer of the Blob's content.
        reader.onloadend = ( ) => {
            resolve(new Uint8Array(reader.result));
        };
        reader.readAsArrayBuffer(data);
    });
};

// Normalize a value about to be stored: raw Uint8Array bytes are wrapped
// into a Blob, anything else passes through untouched. Always resolves
// asynchronously to keep a uniform contract.
const toBlob = function(data) {
    if ( data instanceof Uint8Array ) {
        return Promise.resolve(new Blob([ data ]));
    }
    return Promise.resolve(data);
};

// LZ4-encode `data` (Uint8Array results are wrapped into a Blob via
// toBlob), then append a { key, value } record to the `store` array.
// Returns a promise which settles once the record has been pushed.
const compress = async function(store, key, data) {
    const value = await µBlock.lz4Codec.encode(data, toBlob);
    store.push({ key, value });
};

// LZ4-decode `data` (Blob inputs are expanded into a Uint8Array via
// fromBlob), then assign the decoded value to `store[key]`. Returns a
// promise which settles once the assignment has been made.
const decompress = async function(store, key, data) {
    const decoded = await µBlock.lz4Codec.decode(data, fromBlob);
    store[key] = decoded;
};

const getFromDb = async function(keys, keyvalStore, callback) {
if ( typeof callback !== 'function' ) { return; }
if ( keys.length === 0 ) { return callback(keyvalStore); }
const promises = [];
const gotOne = function() {
if ( typeof this.result !== 'object' ) { return; }
keyvalStore[this.result.key] = this.result.value;
if ( this.result.value instanceof Blob === false ) { return; }
promises.push(
µBlock.lz4Codec.decode(
this.result.key,
this.result.value
).then(result => {
keyvalStore[result.key] = result.data;
})
);
const { key, value } = this.result;
keyvalStore[key] = value;
if ( value instanceof Blob === false ) { return; }
promises.push(decompress(keyvalStore, key, value));
};
try {
const db = await getDb();
Expand Down Expand Up @@ -265,16 +291,10 @@
});
return;
}
keyvalStore[entry.key] = entry.value;
const { key, value } = entry;
keyvalStore[key] = value;
if ( entry.value instanceof Blob === false ) { return; }
promises.push(
µBlock.lz4Codec.decode(
entry.key,
entry.value
).then(result => {
keyvalStore[result.key] = result.value;
})
);
promises.push(decompress(keyvalStore, key, value));
}).catch(reason => {
console.info(`cacheStorage.getAllFromDb() failed: ${reason}`);
callback();
Expand All @@ -297,19 +317,14 @@
const entries = [];
const dontCompress =
µBlock.hiddenSettings.cacheStorageCompression !== true;
const handleEncodingResult = result => {
entries.push({ key: result.key, value: result.data });
};
for ( const key of keys ) {
const data = keyvalStore[key];
const isString = typeof data === 'string';
const value = keyvalStore[key];
const isString = typeof value === 'string';
if ( isString === false || dontCompress ) {
entries.push({ key, value: data });
entries.push({ key, value });
continue;
}
promises.push(
µBlock.lz4Codec.encode(key, data).then(handleEncodingResult)
);
promises.push(compress(entries, key, value));
}
const finish = ( ) => {
if ( callback === undefined ) { return; }
Expand Down
12 changes: 9 additions & 3 deletions src/js/cloud-ui.js
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ if ( self.cloud.datakey === '' ) { return; }
/******************************************************************************/

const fetchStorageUsed = async function() {
const elem = widget.querySelector('#cloudCapacity');
let elem = widget.querySelector('#cloudCapacity');
if ( elem.classList.contains('hide') ) { return; }
const result = await vAPI.messaging.send('cloudWidget', {
what: 'cloudUsed',
Expand All @@ -58,10 +58,16 @@ const fetchStorageUsed = async function() {
elem.classList.add('hide');
return;
}
const units = ' ' + vAPI.i18n('genericBytes');
elem.title = result.max.toLocaleString() + units;
const total = (result.total / result.max * 100).toFixed(1);
elem.firstElementChild.style.width = `${total}%`;
elem = elem.firstElementChild;
elem.style.width = `${total}%`;
elem.title = result.total.toLocaleString() + units;
const used = (result.used / result.total * 100).toFixed(1);
elem.firstElementChild.firstElementChild.style.width = `${used}%`;
elem = elem.firstElementChild;
elem.style.width = `${used}%`;
elem.title = result.used.toLocaleString() + units;
};

/******************************************************************************/
Expand Down
Loading

0 comments on commit d8b6b31

Please sign in to comment.