// v86/src/browser/filestorage.js
"use strict";
const INDEXEDDB_STORAGE_VERSION = 2;
const INDEXEDDB_STORAGE_NAME = "v86-filesystem-storage";
const INDEXEDDB_STORAGE_STORE = "store";
const INDEXEDDB_STORAGE_KEY_PATH = "sha256sum";
const INDEXEDDB_STORAGE_DATA_PATH = "data";
const INDEXEDDB_STORAGE_GET_BLOCK_KEY = (sha256sum, block_number) =>
block_number === 0 ? sha256sum : `${sha256sum}-${block_number - 1}`;
const INDEXEDDB_STORAGE_BLOCKSIZE = 4096;
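// Illustration of the resulting key scheme (informative comment only): a file is split into
// 4096-byte blocks, and block n of the file with hash <sha256sum> is stored under
//   n === 0:  "<sha256sum>"           (the bare hash, so single-block files need no suffix)
//   n  >  0:  "<sha256sum>-" + (n-1)  (e.g. block 1 -> "<sha256sum>-0", block 2 -> "<sha256sum>-1")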
/** @interface */
function FileStorageInterface() {}
/**
* Read a portion of a file.
* @param {string} sha256sum
* @param {number} offset
* @param {number} count
* @return {!Promise<Uint8Array>} null if file does not exist.
*/
FileStorageInterface.prototype.read = function(sha256sum, offset, count) {};
/**
* Add a read-only file to the filestorage.
* @param {string} sha256sum
* @param {!Uint8Array} data
* @return {!Promise}
*/
FileStorageInterface.prototype.set = function(sha256sum, data) {};
/**
* Call this when the file won't be used soon, e.g. when a file closes or when this immutable
* version is already out of date. It helps prevent unused files from accumulating in memory
* in the long run for some FileStorage backends.
* @param {string} sha256sum
*/
FileStorageInterface.prototype.uncache = function(sha256sum) {};
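/*
* Minimal usage sketch for any FileStorageInterface implementation (illustrative only; the
* storage instance and hash value passed in below are placeholders, not defined in this file):
*
*   async function example(storage, sha256sum)
*   {
*       await storage.set(sha256sum, new Uint8Array([1, 2, 3, 4]));
*       const chunk = await storage.read(sha256sum, 1, 2); // Uint8Array [2, 3]
*       storage.uncache(sha256sum); // hint that the data won't be needed soon
*   }
*/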
/**
* @constructor
* @implements {FileStorageInterface}
*/
function MemoryFileStorage()
{
/**
* From sha256sum to file data.
* @type {Map<string,Uint8Array>}
*/
this.filedata = new Map();
}
/**
* @param {string} sha256sum
* @param {number} offset
* @param {number} count
* @return {!Promise<Uint8Array>} null if file does not exist.
*/
MemoryFileStorage.prototype.read = async function(sha256sum, offset, count)
{
dbg_assert(sha256sum, "MemoryFileStorage read: sha256sum should be a non-empty string");
const data = this.filedata.get(sha256sum);
if(!data)
{
return null;
}
return data.subarray(offset, offset + count);
};
/**
* @param {string} sha256sum
* @param {!Uint8Array} data
*/
MemoryFileStorage.prototype.set = async function(sha256sum, data)
{
dbg_assert(sha256sum, "MemoryFileStorage set: sha256sum should be a non-empty string");
dbg_assert(!this.filedata.has(sha256sum), "MemoryFileStorage set: Storage should be read-only");
this.filedata.set(sha256sum, data);
};
/**
* @param {string} sha256sum
*/
MemoryFileStorage.prototype.uncache = function(sha256sum)
{
this.filedata.delete(sha256sum);
};
/**
* Use IndexedDBFileStorage.try_create() instead.
* @private
* @constructor
* @param {!IDBDatabase} db The IndexedDB database opened via init_db().
* @implements {FileStorageInterface}
*/
function IndexedDBFileStorage(db)
{
this.db = db;
}
IndexedDBFileStorage.try_create = async function()
{
if(typeof window === "undefined" || !window.indexedDB)
{
throw new Error("IndexedDB is not available");
}
const db = await IndexedDBFileStorage.init_db();
const file_storage = new IndexedDBFileStorage(db);
return file_storage;
};
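/*
* Usage sketch (illustrative; the async wrapper and the fallback policy are assumptions, not
* something this file prescribes):
*
*   async function create_file_storage()
*   {
*       try
*       {
*           return await IndexedDBFileStorage.try_create();
*       }
*       catch(e)
*       {
*           // e.g. no window.indexedDB, or the database failed to open
*           return new MemoryFileStorage();
*       }
*   }
*/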
/**
* @return {!Promise<!IDBDatabase>}
*/
IndexedDBFileStorage.init_db = function()
{
return new Promise((resolve, reject) =>
{
const open_request = indexedDB.open(INDEXEDDB_STORAGE_NAME, INDEXEDDB_STORAGE_VERSION);
open_request.onblocked = event =>
{
dbg_log("IndexedDB blocked by an older database version being opened.", LOG_9P);
};
open_request.onerror = event =>
{
dbg_log("Error opening IndexedDB! Are you in private browsing mode? Error:", LOG_9P);
dbg_log(open_request.error.toString(), LOG_9P);
reject(open_request.error);
};
/** @suppress{uselessCode} */
open_request.onupgradeneeded = event =>
{
const db = open_request.result;
if(event.oldVersion < 1)
{
// Initial version.
db.createObjectStore(INDEXEDDB_STORAGE_STORE, { keyPath: INDEXEDDB_STORAGE_KEY_PATH });
}
if(event.oldVersion < 2)
{
// Version 2 removes total_size and extra_block_count from the base entries.
// No schema change is needed, but files written under version 2 are not backwards compatible.
}
};
open_request.onsuccess = event =>
{
const db = open_request.result;
db.onabort = event =>
{
dbg_assert(false, "IndexedDBFileStorage: transaction aborted unexpectedly");
};
db.onclose = event =>
{
dbg_assert(false, "IndexedDBFileStorage: connection closed unexpectedly");
};
db.onerror = event =>
{
const error = event.target.error;
dbg_log("IndexedDBFileStorage: unexpected error: " + error, LOG_9P);
throw error;
};
db.onversionchange = event =>
{
dbg_log("Caution: Another v86 instance might be trying to upgrade the IndexedDB " +
"database to a newer version, or a request has been issued to delete the " +
"database, but is blocked by this current v86 instance ", LOG_9P);
};
resolve(db);
};
});
};
/**
* @private
* @param {IDBObjectStore} store
* @param {string} sha256sum
* @return {!Promise<boolean>}
*/
IndexedDBFileStorage.prototype.db_has_file = function(store, sha256sum)
{
return new Promise((resolve, reject) =>
{
const request = store.count(sha256sum);
request.onsuccess = event => resolve(/** @type {number} */ (request.result) > 0);
request.onerror = event => reject(request.error);
});
};
/**
* @param {string} sha256sum
* @param {number} offset
* @param {number} count
* @return {!Promise<Uint8Array>} null if file does not exist.
*/
IndexedDBFileStorage.prototype.read = function(sha256sum, offset, count)
{
dbg_assert(sha256sum, "IndexedDBFileStorage read: sha256sum should be a non-empty string");
const transaction = this.db.transaction(INDEXEDDB_STORAGE_STORE, "readonly");
transaction.onerror = event =>
{
const error = event.target.error;
dbg_log(`IndexedDBFileStorage read: Error with transaction: ${error}`, LOG_9P);
throw error;
};
const store = transaction.objectStore(INDEXEDDB_STORAGE_STORE);
const block_number_start = Math.floor(offset / INDEXEDDB_STORAGE_BLOCKSIZE);
const block_number_end = count > 0 ?
Math.floor((offset + count - 1) / INDEXEDDB_STORAGE_BLOCKSIZE) :
block_number_start;
return new Promise((resolve, reject) =>
{
if(block_number_end === 0)
{
// Only first block to be read.
const block_key = INDEXEDDB_STORAGE_GET_BLOCK_KEY(sha256sum, 0);
const block_request = store.get(block_key);
block_request.onsuccess = async event =>
{
const block_entry = block_request.result;
if(!block_entry)
{
resolve(null);
return;
}
const block_data = block_entry[INDEXEDDB_STORAGE_DATA_PATH];
dbg_assert(block_data instanceof Uint8Array, "IndexedDBFileStorage read: " +
`Entry for block-0 without Uint8Array data field.`);
const chunk = block_data.subarray(offset, offset + count);
resolve(chunk);
};
}
else if(block_number_start === block_number_end)
{
// Only one block to be read.
const block_offset = block_number_start * INDEXEDDB_STORAGE_BLOCKSIZE;
const block_key = INDEXEDDB_STORAGE_GET_BLOCK_KEY(sha256sum, block_number_start);
const block_request = store.get(block_key);
block_request.onsuccess = async event =>
{
const block_entry = block_request.result;
if(!block_entry)
{
if(!await this.db_has_file(store, sha256sum))
{
resolve(null);
}
else
{
resolve(new Uint8Array(0));
}
return;
}
const block_data = block_entry[INDEXEDDB_STORAGE_DATA_PATH];
dbg_assert(block_data instanceof Uint8Array, "IndexedDBFileStorage read: " +
`Entry for block-${block_number_start} without Uint8Array data field.`);
const chunk_start = Math.max(0, offset - block_offset);
const chunk_end = offset + count - block_offset;
const chunk = block_data.subarray(chunk_start, chunk_end);
resolve(chunk);
};
}
else
{
// Multiple blocks to be read.
const read_data = new Uint8Array(count);
let read_count = 0;
for(let block_number = block_number_start; block_number <= block_number_end; block_number++)
{
const block_offset = block_number * INDEXEDDB_STORAGE_BLOCKSIZE;
const block_key = INDEXEDDB_STORAGE_GET_BLOCK_KEY(sha256sum, block_number);
const block_request = store.get(block_key);
block_request.onsuccess = async event =>
{
const block_entry = block_request.result;
if(!block_entry)
{
// If the first requested block doesn't exist, then the remaining blocks
// cannot exist.
if(block_number === block_number_start)
{
if(!await this.db_has_file(store, sha256sum))
{
// Not aborting transaction here because:
// - Abort is treated like an error,
// - AbortError sometimes indicates a different error we want to notice,
// - Most read calls only read a single block anyway.
resolve(null);
}
}
return;
}
const block_data = block_entry[INDEXEDDB_STORAGE_DATA_PATH];
dbg_assert(block_data instanceof Uint8Array, "IndexedDBFileStorage read: " +
`Entry for block-${block_number} without Uint8Array data field.`);
const chunk_start = Math.max(0, offset - block_offset);
const chunk_end = offset + count - block_offset;
const chunk = block_data.subarray(chunk_start, chunk_end);
read_data.set(chunk, block_offset + chunk_start - offset);
read_count += chunk.length;
};
}
transaction.oncomplete = event =>
{
resolve(read_data.subarray(0, read_count));
};
}
});
};
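// Worked example of the block arithmetic above (informative comment only): with 4096-byte
// blocks, read(sha, 5000, 1000) gives block_number_start = floor(5000 / 4096) = 1 and
// block_number_end = floor(5999 / 4096) = 1, so the single-block branch fetches key "<sha>-0"
// and returns its bytes [904, 1904) (since 5000 - 4096 = 904). A read(sha, 4000, 1000) spans
// blocks 0 and 1 and takes the multi-block branch instead.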
/**
* @param {string} sha256sum
* @param {!Uint8Array} data
*/
IndexedDBFileStorage.prototype.set = function(sha256sum, data)
{
dbg_assert(sha256sum, "IndexedDBFileStorage set: sha256sum should be a non-empty string");
const transaction = this.db.transaction(INDEXEDDB_STORAGE_STORE, "readwrite");
transaction.onerror = event =>
{
const error = event.target.error;
dbg_log(`IndexedDBFileStorage set: Error with transaction: ${error}`, LOG_9P);
throw error;
};
const store = transaction.objectStore(INDEXEDDB_STORAGE_STORE);
// Ensure at least a single entry is added for empty files.
const offset_upper_bound = data.length || 1;
for(let i = 0, offset = 0; offset < offset_upper_bound; i++, offset += INDEXEDDB_STORAGE_BLOCKSIZE)
{
const block_key = INDEXEDDB_STORAGE_GET_BLOCK_KEY(sha256sum, i);
// Note: data.slice() copies the block's bytes. Storing a subarray view instead would make the
// structured clone serialize the entire backing ArrayBuffer into the database for every block.
const block_data = data.slice(offset, offset + INDEXEDDB_STORAGE_BLOCKSIZE);
store.put({
[INDEXEDDB_STORAGE_KEY_PATH]: block_key,
[INDEXEDDB_STORAGE_DATA_PATH]: block_data,
});
}
return new Promise((resolve, reject) => {
transaction.oncomplete = event => resolve();
});
};
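// Worked example of the chunking above (informative comment only): a 10000-byte file is stored
// as three records, one per 4096-byte block: key "<sha>" with bytes [0, 4096), key "<sha>-0"
// with bytes [4096, 8192), and key "<sha>-1" with the remaining 1808 bytes. An empty file still
// produces one record (key "<sha>" with an empty Uint8Array) thanks to offset_upper_bound.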
/**
* @param {string} sha256sum
*/
IndexedDBFileStorage.prototype.uncache = function(sha256sum)
{
// No-op.
};
/**
* @constructor
* @implements {FileStorageInterface}
* @param {FileStorageInterface} file_storage
* @param {string} baseurl
*/
function ServerFileStorageWrapper(file_storage, baseurl)
{
dbg_assert(baseurl, "ServerFileStorageWrapper: baseurl should not be empty");
this.storage = file_storage;
this.baseurl = baseurl;
}
/**
* @param {string} sha256sum
* @return {!Promise<Uint8Array>}
*/
ServerFileStorageWrapper.prototype.load_from_server = function(sha256sum)
{
return new Promise((resolve, reject) =>
{
v86util.load_file(this.baseurl + sha256sum, { done: buffer =>
{
const data = new Uint8Array(buffer);
this.set(sha256sum, data).then(() => resolve(data));
}});
});
};
/**
* @param {string} sha256sum
* @param {number} offset
* @param {number} count
* @return {!Promise<Uint8Array>}
*/
ServerFileStorageWrapper.prototype.read = async function(sha256sum, offset, count)
{
const data = await this.storage.read(sha256sum, offset, count);
if(!data)
{
const full_file = await this.load_from_server(sha256sum);
return full_file.subarray(offset, offset + count);
}
return data;
};
/**
* @param {string} sha256sum
* @param {!Uint8Array} data
*/
ServerFileStorageWrapper.prototype.set = async function(sha256sum, data)
{
return await this.storage.set(sha256sum, data);
};
/**
* @param {string} sha256sum
*/
ServerFileStorageWrapper.prototype.uncache = function(sha256sum)
{
this.storage.uncache(sha256sum);
};
// Closure Compiler's way of exporting
if(typeof window !== "undefined")
{
window["MemoryFileStorage"] = MemoryFileStorage;
window["IndexedDBFileStorage"] = IndexedDBFileStorage;
window["ServerFileStorageWrapper"] = ServerFileStorageWrapper;
}
else if(typeof module !== "undefined" && typeof module.exports !== "undefined")
{
module.exports["MemoryFileStorage"] = MemoryFileStorage;
module.exports["IndexedDBFileStorage"] = IndexedDBFileStorage;
module.exports["ServerFileStorageWrapper"] = ServerFileStorageWrapper;
}
else if(typeof importScripts === "function")
{
// web worker
self["MemoryFileStorage"] = MemoryFileStorage;
self["IndexedDBFileStorage"] = IndexedDBFileStorage;
self["ServerFileStorageWrapper"] = ServerFileStorageWrapper;
}