Merge pull request #121 from rhashimoto/file-demo
Create database import demo
rhashimoto authored Oct 31, 2023
2 parents b3abe74 + d69169d commit 9f5f748
Showing 3 changed files with 212 additions and 0 deletions.
18 changes: 18 additions & 0 deletions demo/file/index.html
@@ -0,0 +1,18 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Local file transfer</title>
<style>
pre { margin: 0 }
</style>
<script type="module" defer src="./index.js"></script>
</head>
<body>
<h1>Sample database importer</h1>
<div>
<input type="file" id="file-import">
</div>
<hr>
</body>
</html>
169 changes: 169 additions & 0 deletions demo/file/index.js
@@ -0,0 +1,169 @@
import * as VFS from "../../src/VFS.js";
import { IDBBatchAtomicVFS } from "../../src/examples/IDBBatchAtomicVFS.js";

const SEARCH_PARAMS = new URLSearchParams(location.search);
const IDB_NAME = SEARCH_PARAMS.get('idb') ?? 'sqlite-vfs';
const DB_NAME = SEARCH_PARAMS.get('db') ?? 'sqlite.db';

// The 16-byte magic string that begins every SQLite database file.
const DBFILE_MAGIC = 'SQLite format 3\x00';

document.getElementById('file-import').addEventListener('change', async event => {
let vfs;
try {
log(`Importing to IndexedDB ${IDB_NAME}, path ${DB_NAME}`);
vfs = new IDBBatchAtomicVFS(IDB_NAME);
// @ts-ignore
await importDatabase(vfs, DB_NAME, event.target.files[0].stream());
log('Import complete');

log('Verifying database integrity');
const url = new URL('./verifier.js', location.href);
url.searchParams.set('idb', IDB_NAME);
url.searchParams.set('db', DB_NAME);
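    // verifier.js opens the imported database in a Worker and posts back
    // the rows returned by PRAGMA integrity_check.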
const worker = new Worker(url, { type: 'module' });
    await new Promise(resolve => {
      worker.addEventListener('message', ({data}) => {
        for (const row of data) {
          log(`integrity result: ${row}`);
        }
        worker.terminate();
        resolve();
      });
    });
log('Verification complete');
} catch (e) {
log(e.toString());
throw e;
} finally {
vfs?.close();
}
});

/**
* @param {VFS.Base} vfs
* @param {string} path
* @param {ReadableStream} stream
*/
async function importDatabase(vfs, path, stream) {
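  // Async generator that reassembles the incoming stream chunks into
  // fixed-size database pages and yields them one at a time.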
async function* pagify() {
/** @type {Uint8Array[]} */ const chunks = [];
const reader = stream.getReader();

// Read at least the file header fields we need.
log('Reading file header...');
while (chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0) < 32) {
const { done, value } = await reader.read();
if (done) throw new Error('Unexpected end of file');
chunks.push(value);
}

// Assemble the file header.
    let copyOffset = 0;
    const header = new DataView(new ArrayBuffer(32));
    for (const chunk of chunks) {
      if (copyOffset >= header.byteLength) break;
      const src = chunk.subarray(0, header.byteLength - copyOffset);
      new Uint8Array(header.buffer, copyOffset).set(src);
      // Advance the offset so later chunks don't overwrite earlier bytes.
      copyOffset += src.byteLength;
    }

if (new TextDecoder().decode(header.buffer.slice(0, 16)) !== DBFILE_MAGIC) {
throw new Error('Not a SQLite database file');
}

    // Extract page parameters. A stored page size value of 1 means 65536.
const pageSize = (field => field === 1 ? 65536 : field)(header.getUint16(16));
const pageCount = header.getUint32(28);
log(`${pageCount} pages, ${pageSize} bytes each, ${pageCount * pageSize} bytes total`);

log('Copying pages...');
for (let i = 0; i < pageCount; ++i) {
// Read enough chunks to produce the next page.
while (chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0) < pageSize) {
const { done, value } = await reader.read();
if (done) throw new Error('Unexpected end of file');
chunks.push(value);
}

// Assemble the page.
// TODO: Optimize case where first chunk has >= pageSize bytes.
let copyOffset = 0;
const page = new Uint8Array(pageSize);
while (copyOffset < pageSize) {
// Copy bytes into the page.
const src = chunks[0].subarray(0, pageSize - copyOffset);
const dst = new Uint8Array(page.buffer, copyOffset);
dst.set(src);

copyOffset += src.byteLength;
if (src.byteLength === chunks[0].byteLength) {
// All the bytes in the chunk were consumed.
chunks.shift();
} else {
chunks[0] = chunks[0].subarray(src.byteLength);
}
}

yield page;
}

const { done } = await reader.read();
if (!done) throw new Error('Unexpected data after last page');
  }

  // Cleanup actions to run (in reverse order) when the import finishes or fails.
  const onFinally = [];
try {
log(`Deleting ${path}...`);
await vfs.xDelete(path, 1);

// Create the file.
log(`Creating ${path}...`);
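    // The file id is an arbitrary integer used to identify this open file
    // in the VFS calls that follow.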
const fileId = 1234;
const flags = VFS.SQLITE_OPEN_MAIN_DB | VFS.SQLITE_OPEN_CREATE | VFS.SQLITE_OPEN_READWRITE;
await check(vfs.xOpen(path, fileId, flags, new DataView(new ArrayBuffer(4))));
onFinally.push(() => vfs.xClose(fileId));

    // Open a "transaction" by taking locks in SQLite's order:
    // SHARED -> RESERVED -> EXCLUSIVE.
await check(vfs.xLock(fileId, VFS.SQLITE_LOCK_SHARED));
onFinally.push(() => vfs.xUnlock(fileId, VFS.SQLITE_LOCK_NONE));
await check(vfs.xLock(fileId, VFS.SQLITE_LOCK_RESERVED));
onFinally.push(() => vfs.xUnlock(fileId, VFS.SQLITE_LOCK_SHARED));
await check(vfs.xLock(fileId, VFS.SQLITE_LOCK_EXCLUSIVE));

    // Do all page writes inside a single batch atomic write so the import
    // commits as a unit.
    const empty = new DataView(new ArrayBuffer(4));
    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_BEGIN_ATOMIC_WRITE, empty);

// Write pages.
let iOffset = 0;
for await (const page of pagify()) {
await check(vfs.xWrite(fileId, page, iOffset));
iOffset += page.byteLength;
}

    // Commit the batch atomic write, then sync the file.
    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_COMMIT_ATOMIC_WRITE, empty);
    await vfs.xFileControl(fileId, VFS.SQLITE_FCNTL_SYNC, empty);
    await vfs.xSync(fileId, VFS.SQLITE_SYNC_NORMAL);
} finally {
while (onFinally.length) {
await onFinally.pop()();
}
}
}

// Append a timestamped line of text to the page.
function log(...args) {
const timestamp = new Date().toLocaleTimeString(undefined, {
hour12: false,
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
fractionalSecondDigits: 3
});

const element = document.createElement('pre');
element.textContent = `${timestamp} ${args.join(' ')}`;
document.body.append(element);
}

// Await a VFS result code and throw if it isn't SQLITE_OK.
async function check(code) {
  const rc = await code;
  if (rc !== VFS.SQLITE_OK) {
    throw new Error(`Error code: ${rc}`);
  }
}
25 changes: 25 additions & 0 deletions demo/file/verifier.js
@@ -0,0 +1,25 @@
import SQLiteESMFactory from '../../dist/wa-sqlite-async.mjs';
import * as SQLite from '../../src/sqlite-api.js';
import { IDBBatchAtomicVFS } from '../../src/examples/IDBBatchAtomicVFS.js';

const SEARCH_PARAMS = new URLSearchParams(location.search);
const IDB_NAME = SEARCH_PARAMS.get('idb') ?? 'sqlite-vfs';
const DB_NAME = SEARCH_PARAMS.get('db') ?? 'sqlite.db';

(async function() {
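  // Instantiate the SQLite WebAssembly module and build the JS API.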
const module = await SQLiteESMFactory();
const sqlite3 = SQLite.Factory(module);

  // Register the same IndexedDB-backed VFS that the importer wrote to.
  const vfs = new IDBBatchAtomicVFS(IDB_NAME);
  sqlite3.vfs_register(vfs, true);

  // Open the imported database and run an integrity check, collecting
  // each result row to report back to the page.
  const db = await sqlite3.open_v2(DB_NAME, SQLite.SQLITE_OPEN_READWRITE, IDB_NAME);

  const results = [];
  await sqlite3.exec(db, 'PRAGMA integrity_check;', (row, columns) => {
    results.push(row[0]);
  });
await sqlite3.close(db);

postMessage(results);
})();
