From 1fe6d7820c63f5e1c268fe481d54502f9484806f Mon Sep 17 00:00:00 2001
From: Roy Hashimoto
Date: Tue, 31 Oct 2023 14:26:13 -0700
Subject: [PATCH 1/2] Create file import demo.

---
 demo/file/index.html |  21 ++++++
 demo/file/index.js   | 149 +++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 170 insertions(+)
 create mode 100644 demo/file/index.html
 create mode 100644 demo/file/index.js

diff --git a/demo/file/index.html b/demo/file/index.html
new file mode 100644
index 00000000..b590e31d
--- /dev/null
+++ b/demo/file/index.html
@@ -0,0 +1,21 @@
+
+
+
+  Local file transfer
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/demo/file/index.js b/demo/file/index.js
new file mode 100644
index 00000000..4df2c1be
--- /dev/null
+++ b/demo/file/index.js
@@ -0,0 +1,149 @@
+import * as VFS from "../../src/VFS";
+import { IDBBatchAtomicVFS } from "../../src/examples/IDBBatchAtomicVFS";
+
+const SEARCH_PARAMS = new URLSearchParams(location.search);
+const IDB_NAME = SEARCH_PARAMS.get('idb') ?? 'sqlite-vfs';
+const DB_NAME = SEARCH_PARAMS.get('db') ?? 'sqlite.db';
+
+const DBFILE_MAGIC = 'SQLite format 3\x00';
+
+document.getElementById('file-import').addEventListener('change', async event => {
+  let vfs;
+  try {
+    log('Importing database...');
+    vfs = new IDBBatchAtomicVFS(IDB_NAME);
+    // @ts-ignore
+    await importDatabase(vfs, DB_NAME, event.target.files[0].stream());
+    log('Import complete');
+  } catch (e) {
+    log(e.toString());
+    throw e;
+  } finally {
+    vfs?.close();
+  }
+});
+
+/**
+ * @param {VFS.Base} vfs
+ * @param {string} path
+ * @param {ReadableStream} stream
+ */
+async function importDatabase(vfs, path, stream) {
+  async function* pagify() {
+    /** @type {Uint8Array[]} */ const chunks = [];
+    const reader = stream.getReader();
+
+    // Read at least the file header fields we need.
+    while (chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0) < 32) {
+      const { done, value } = await reader.read();
+      if (done) throw new Error('Unexpected end of file');
+      chunks.push(value);
+    }
+
+    // Assemble the file header.
+    let copyOffset = 0;
+    const header = new DataView(new ArrayBuffer(32));
+    for (const chunk of chunks) {
+      const dst = new Uint8Array(header.buffer, copyOffset);
+      dst.set(chunk.subarray(0, header.byteLength - copyOffset));
+    }
+
+    if (new TextDecoder().decode(header.buffer.slice(0, 16)) !== DBFILE_MAGIC) {
+      throw new Error('Not a SQLite database file');
+    }
+
+    // Extract page parameters.
+    const pageSize = (field => field === 1 ? 65536 : field)(header.getUint16(16));
+    const pageCount = header.getUint32(28);
+
+    for (let i = 0; i < pageCount; ++i) {
+      // Read enough chunks to produce the next page.
+      while (chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0) < pageSize) {
+        const { done, value } = await reader.read();
+        if (done) throw new Error('Unexpected end of file');
+        chunks.push(value);
+      }
+
+      // Assemble the page.
+      // TODO: Optimize case where first chunk has >= pageSize bytes.
+      let copyOffset = 0;
+      const page = new Uint8Array(pageSize);
+      while (copyOffset < pageSize) {
+        // Copy bytes into the page.
+        const src = chunks[0].subarray(0, pageSize - copyOffset);
+        const dst = new Uint8Array(page.buffer, copyOffset);
+        dst.set(src);
+
+        copyOffset += src.byteLength;
+        if (src.byteLength === chunks[0].byteLength) {
+          // All the bytes in the chunk were consumed.
+          chunks.shift();
+        } else {
+          chunks[0] = chunks[0].subarray(src.byteLength);
+        }
+      }
+
+      yield page;
+    }
+
+    const { done } = await reader.read();
+    if (!done) throw new Error('Unexpected data after last page');
+  };
+
+  const onFinally = [];
+  try {
+    // Delete any existing file.
+    await vfs.xDelete(path, 0);
+
+    // Create the file.
+    const fileId = 1234;
+    const flags = VFS.SQLITE_OPEN_MAIN_DB | VFS.SQLITE_OPEN_CREATE | VFS.SQLITE_OPEN_READWRITE;
+    await check(vfs.xOpen(path, fileId, flags, new DataView(new ArrayBuffer(4))));
+    onFinally.push(() => vfs.xClose(fileId));
+
+    // Open a "transaction".
+    await check(vfs.xLock(fileId, VFS.SQLITE_LOCK_SHARED));
+    onFinally.push(() => vfs.xUnlock(fileId, VFS.SQLITE_LOCK_NONE));
+    await check(vfs.xLock(fileId, VFS.SQLITE_LOCK_RESERVED));
+    onFinally.push(() => vfs.xUnlock(fileId, VFS.SQLITE_LOCK_SHARED));
+    await check(vfs.xLock(fileId, VFS.SQLITE_LOCK_EXCLUSIVE));
+
+    const empty = new DataView(new ArrayBuffer(4));
+    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_BEGIN_ATOMIC_WRITE, empty);
+
+    // Write pages.
+    let iOffset = 0;
+    for await (const page of pagify()) {
+      await check(vfs.xWrite(fileId, page, iOffset));
+      iOffset += page.byteLength;
+    }
+
+    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_COMMIT_ATOMIC_WRITE, empty);
+    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_SYNC, empty);
+    await vfs.xSync(fileId, VFS.SQLITE_SYNC_NORMAL);
+  } finally {
+    while (onFinally.length) {
+      await onFinally.pop()();
+    }
+  }
+}
+
+function log(...args) {
+  const timestamp = new Date().toLocaleTimeString(undefined, {
+    hour12: false,
+    hour: '2-digit',
+    minute: '2-digit',
+    second: '2-digit',
+    fractionalSecondDigits: 3
+  });
+
+  const element = document.createElement('pre');
+  element.textContent = `${timestamp} ${args.join(' ')}`;
+  document.body.append(element);
+}
+
+async function check(code) {
+  if (await code !== VFS.SQLITE_OK) {
+    throw new Error(`Error code: ${code}`);
+  }
+}
\ No newline at end of file
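A note on patch 1: in the "Assemble the file header" loop inside `pagify()`, `copyOffset` is never advanced, so the code is only correct when the first chunk returned by `reader.read()` already contains the 32 header bytes; if the header ever arrives split across chunks, each chunk is written over offset 0. Below is a minimal sketch, not part of the patch, of an offset-advancing variant; the `assembleHeader` helper name is illustrative only.

```js
// Sketch: assemble the 32-byte SQLite header from buffered stream chunks,
// advancing the copy offset so multi-chunk headers are handled correctly.
function assembleHeader(/** @type {Uint8Array[]} */ chunks) {
  const header = new DataView(new ArrayBuffer(32));
  let copyOffset = 0;
  for (const chunk of chunks) {
    if (copyOffset >= header.byteLength) break;  // header already complete
    // Copy at most the remaining header bytes from this chunk.
    const src = chunk.subarray(0, header.byteLength - copyOffset);
    new Uint8Array(header.buffer, copyOffset).set(src);
    copyOffset += src.byteLength;
  }
  return header;
}
```

As in the patch, the chunks are only read, not consumed, so they remain available for the page-assembly loop that follows.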
From d69169d880addb371f6ea8f4ed24ce453701766d Mon Sep 17 00:00:00 2001
From: Roy Hashimoto
Date: Tue, 31 Oct 2023 15:30:33 -0700
Subject: [PATCH 2/2] Add file import verifier.

---
 demo/file/index.html  |  5 +----
 demo/file/index.js    | 26 +++++++++++++++++++++++---
 demo/file/verifier.js | 25 +++++++++++++++++++++++++
 3 files changed, 49 insertions(+), 7 deletions(-)
 create mode 100644 demo/file/verifier.js

diff --git a/demo/file/index.html b/demo/file/index.html
index b590e31d..817a8ad6 100644
--- a/demo/file/index.html
+++ b/demo/file/index.html
@@ -4,18 +4,15 @@
   Local file transfer
+  Sample database importer
 
-
-
-
 
\ No newline at end of file
diff --git a/demo/file/index.js b/demo/file/index.js
index 4df2c1be..779f6ebf 100644
--- a/demo/file/index.js
+++ b/demo/file/index.js
@@ -10,11 +10,27 @@ const DBFILE_MAGIC = 'SQLite format 3\x00';
 document.getElementById('file-import').addEventListener('change', async event => {
   let vfs;
   try {
-    log('Importing database...');
+    log(`Importing to IndexedDB ${IDB_NAME}, path ${DB_NAME}`);
     vfs = new IDBBatchAtomicVFS(IDB_NAME);
     // @ts-ignore
     await importDatabase(vfs, DB_NAME, event.target.files[0].stream());
     log('Import complete');
+
+    log('Verifying database integrity');
+    const url = new URL('./verifier.js', location.href);
+    url.searchParams.set('idb', IDB_NAME);
+    url.searchParams.set('db', DB_NAME);
+    const worker = new Worker(url, { type: 'module' });
+    await new Promise(resolve => {
+      worker.addEventListener('message', ({data}) => {
+        resolve();
+        for (const row of data) {
+          log(`integrity result: ${row}`);
+        }
+        worker.terminate();
+      });
+    });
+    log('Verification complete');
   } catch (e) {
     log(e.toString());
     throw e;
@@ -34,6 +50,7 @@ async function importDatabase(vfs, path, stream) {
     const reader = stream.getReader();
 
     // Read at least the file header fields we need.
+    log('Reading file header...');
     while (chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0) < 32) {
       const { done, value } = await reader.read();
       if (done) throw new Error('Unexpected end of file');
@@ -55,7 +72,9 @@ async function importDatabase(vfs, path, stream) {
     // Extract page parameters.
     const pageSize = (field => field === 1 ? 65536 : field)(header.getUint16(16));
     const pageCount = header.getUint32(28);
+    log(`${pageCount} pages, ${pageSize} bytes each, ${pageCount * pageSize} bytes total`);
 
+    log('Copying pages...');
     for (let i = 0; i < pageCount; ++i) {
       // Read enough chunks to produce the next page.
       while (chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0) < pageSize) {
@@ -92,10 +111,11 @@
 
   const onFinally = [];
   try {
-    // Delete any existing file.
-    await vfs.xDelete(path, 0);
+    log(`Deleting ${path}...`);
+    await vfs.xDelete(path, 1);
 
     // Create the file.
+    log(`Creating ${path}...`);
     const fileId = 1234;
     const flags = VFS.SQLITE_OPEN_MAIN_DB | VFS.SQLITE_OPEN_CREATE | VFS.SQLITE_OPEN_READWRITE;
     await check(vfs.xOpen(path, fileId, flags, new DataView(new ArrayBuffer(4))));
diff --git a/demo/file/verifier.js b/demo/file/verifier.js
new file mode 100644
index 00000000..1a53e864
--- /dev/null
+++ b/demo/file/verifier.js
@@ -0,0 +1,25 @@
+import SQLiteESMFactory from '../../dist/wa-sqlite-async.mjs';
+import * as SQLite from '../../src/sqlite-api.js';
+import { IDBBatchAtomicVFS } from '../../src/examples/IDBBatchAtomicVFS.js';
+
+const SEARCH_PARAMS = new URLSearchParams(location.search);
+const IDB_NAME = SEARCH_PARAMS.get('idb') ?? 'sqlite-vfs';
+const DB_NAME = SEARCH_PARAMS.get('db') ?? 'sqlite.db';
+
+(async function() {
+  const module = await SQLiteESMFactory();
+  const sqlite3 = SQLite.Factory(module);
+
+  const vfs = new IDBBatchAtomicVFS(IDB_NAME);
+  sqlite3.vfs_register(vfs, true);
+
+  const db = await sqlite3.open_v2(DB_NAME, SQLite.SQLITE_OPEN_READWRITE, IDB_NAME);
+
+  const results = []
+  await sqlite3.exec(db, 'PRAGMA integrity_check;', (row, columns) => {
+    results.push(row[0]);
+  });
+  await sqlite3.close(db);
+
+  postMessage(results);
+})();
\ No newline at end of file
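Taken together, the two patches give a page that streams a local SQLite file into IndexedDB through `IDBBatchAtomicVFS` and then runs `PRAGMA integrity_check` on it in a module worker; both the page and the worker read optional `idb` and `db` query parameters to choose the IndexedDB database name and the SQLite path. Once an import succeeds, the same file can be opened with the ordinary wa-sqlite API. Below is a minimal sketch of such a follow-up query, reusing only the calls already shown in verifier.js; the table-listing query and the default names `sqlite-vfs`/`sqlite.db` are illustrative, not part of the patches.

```js
// Sketch only: open the imported database and list its tables.
// Mirrors verifier.js; run as a module script or module worker.
import SQLiteESMFactory from '../../dist/wa-sqlite-async.mjs';
import * as SQLite from '../../src/sqlite-api.js';
import { IDBBatchAtomicVFS } from '../../src/examples/IDBBatchAtomicVFS.js';

const module = await SQLiteESMFactory();
const sqlite3 = SQLite.Factory(module);

// Register the same IndexedDB-backed VFS the importer wrote through.
const vfs = new IDBBatchAtomicVFS('sqlite-vfs');
sqlite3.vfs_register(vfs, true);

// Open the file by the path used during import.
const db = await sqlite3.open_v2('sqlite.db', SQLite.SQLITE_OPEN_READWRITE, 'sqlite-vfs');
try {
  await sqlite3.exec(db, `SELECT name FROM sqlite_master WHERE type = 'table'`, (row) => {
    console.log('table:', row[0]);
  });
} finally {
  await sqlite3.close(db);
}
```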