Skip to content

Commit aa90aa0

Browse files
[Node.js] Bundle PowerSync Core (#733)
1 parent 688265f commit aa90aa0

File tree

9 files changed

+184
-100
lines changed

9 files changed

+184
-100
lines changed

.changeset/seven-fireants-boil.md

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
---
2+
'@powersync/node': minor
3+
---
4+
5+
Pre-package all the PowerSync Rust extension binaries for all supported platforms and architectures in the NPM package `lib` folder. Install scripts are no longer required to download the PowerSync core.
6+
7+
The binary files relevant to a specific architecture now have updated filenames. Custom code which previously referenced binary filenames requires updating. A helper function is available to automatically provide the correct filename.
8+
9+
```diff
10+
+ import { getPowerSyncExtensionFilename } from '@powersync/node/worker.js';
11+
12+
function resolvePowerSyncCoreExtension() {
13+
- const platform = OS.platform();
14+
- let extensionPath: string;
15+
- if (platform === 'win32') {
16+
- extensionPath = 'powersync.dll';
17+
- } else if (platform === 'linux') {
18+
- extensionPath = 'libpowersync.so';
19+
- } else if (platform === 'darwin') {
20+
- extensionPath = 'libpowersync.dylib';
21+
- } else {
22+
- throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.';
23+
- }
24+
+ const extensionPath = getPowerSyncExtensionFilename();
25+
26+
// This example uses copy-webpack-plugin to copy the prebuilt library over. This ensures that it is
27+
// available in packaged release builds.
28+
let libraryPath = path.resolve(__dirname, 'powersync', extensionPath);
29+
```

demos/example-electron-node/README.md

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,9 @@ In particular:
99
queries. This worker is part of the `@powersync/node` package and wouldn't be copied into the resulting Electron
1010
app by default. For this reason, this example has its own `src/main/worker.ts` loaded with `new URL('./worker.ts', import.meta.url)`.
1111
2. In addition to the worker, PowerSync requires access to a SQLite extension providing sync functionality.
12-
This file is also part of the `@powersync/node` package and called `powersync.dll`, `libpowersync.dylib` or
13-
`libpowersync.so` depending on the operating system.
12+
This file is also part of the `@powersync/node` package and is the prebuilt release asset (for example
13+
`powersync_x64.dll`, `libpowersync_x64.dylib` or `libpowersync_x64.so`) depending on the operating system and
14+
architecture.
1415
We use the `copy-webpack-plugin` package to make sure a copy of that file is available to the main process,
1516
and load it in the custom `src/main/worker.ts`.
1617
3. The `get()` and `getAll()` methods are exposed to the renderer process with an IPC channel.
@@ -21,7 +22,7 @@ To see it in action:
2122
2. Copy `.env.local.template` to `.env.local`, and complete the environment variables. You can generate a [temporary development token](https://docs.powersync.com/usage/installation/authentication-setup/development-tokens), or leave blank to test with local-only data.
2223
The example works with the schema from the [PowerSync + Supabase tutorial](https://docs.powersync.com/integration-guides/supabase-+-powersync#supabase-powersync).
2324
3. `cd` into this directory. In this mono-repo, you'll have to run `./node_modules/.bin/electron-rebuild` once to make sure `@powersync/better-sqlite3` was compiled with Electron's toolchain.
24-
3. Finally, run `pnpm start`.
25+
4. Finally, run `pnpm start`.
2526

2627
Apart from the build setup, this example is purposefully kept simple.
2728
To make sure PowerSync is working, you can run `await powersync.get('SELECT powersync_rs_version()');` in the DevTools

demos/example-electron-node/config.ts

Lines changed: 9 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,17 @@
1-
import OS from 'node:os';
21
import path from 'node:path';
32

4-
import type { ForgeConfig } from '@electron-forge/shared-types';
5-
import { MakerSquirrel } from '@electron-forge/maker-squirrel';
6-
import { MakerZIP } from '@electron-forge/maker-zip';
73
import { MakerDeb } from '@electron-forge/maker-deb';
84
import { MakerRpm } from '@electron-forge/maker-rpm';
5+
import { MakerSquirrel } from '@electron-forge/maker-squirrel';
6+
import { MakerZIP } from '@electron-forge/maker-zip';
97
import { AutoUnpackNativesPlugin } from '@electron-forge/plugin-auto-unpack-natives';
108
import { WebpackPlugin } from '@electron-forge/plugin-webpack';
11-
import { type Configuration, type ModuleOptions, type DefinePlugin } from 'webpack';
9+
import type { ForgeConfig } from '@electron-forge/shared-types';
10+
import { getPowerSyncExtensionFilename } from '@powersync/node/worker.js';
11+
import type ICopyPlugin from 'copy-webpack-plugin';
1212
import * as dotenv from 'dotenv';
1313
import type IForkTsCheckerWebpackPlugin from 'fork-ts-checker-webpack-plugin';
14-
import type ICopyPlugin from 'copy-webpack-plugin';
15-
14+
import { type Configuration, type DefinePlugin, type ModuleOptions } from 'webpack';
1615
dotenv.config({ path: '.env.local' });
1716

1817
const ForkTsCheckerWebpackPlugin: typeof IForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin');
@@ -57,17 +56,7 @@ const defaultWebpackRules: () => Required<ModuleOptions>['rules'] = () => {
5756
];
5857
};
5958

60-
const platform = OS.platform();
61-
let extensionPath: string;
62-
if (platform === 'win32') {
63-
extensionPath = 'powersync.dll';
64-
} else if (platform === 'linux') {
65-
extensionPath = 'libpowersync.so';
66-
} else if (platform === 'darwin') {
67-
extensionPath = 'libpowersync.dylib';
68-
} else {
69-
throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.';
70-
}
59+
let extensionFilename = getPowerSyncExtensionFilename();
7160

7261
const mainConfig: Configuration = {
7362
/**
@@ -84,8 +73,8 @@ const mainConfig: Configuration = {
8473
new CopyPlugin({
8574
patterns: [
8675
{
87-
from: path.resolve(require.resolve('@powersync/node/package.json'), `../lib/${extensionPath}`),
88-
to: path.join('powersync', extensionPath)
76+
from: path.resolve(require.resolve('@powersync/node/package.json'), `../lib/${extensionFilename}`),
77+
to: path.join('powersync', extensionFilename)
8978
}
9079
]
9180
}),

demos/example-electron-node/src/main/worker.ts

Lines changed: 4 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,14 @@
1-
import * as path from 'node:path';
2-
import OS from 'node:os';
31
import Database from 'better-sqlite3';
2+
import * as path from 'node:path';
43

5-
import { startPowerSyncWorker } from '@powersync/node/worker.js';
4+
import { getPowerSyncExtensionFilename, startPowerSyncWorker } from '@powersync/node/worker.js';
65

76
function resolvePowerSyncCoreExtension() {
8-
const platform = OS.platform();
9-
let extensionPath: string;
10-
if (platform === 'win32') {
11-
extensionPath = 'powersync.dll';
12-
} else if (platform === 'linux') {
13-
extensionPath = 'libpowersync.so';
14-
} else if (platform === 'darwin') {
15-
extensionPath = 'libpowersync.dylib';
16-
} else {
17-
throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.';
18-
}
7+
const extensionFilename = getPowerSyncExtensionFilename();
198

209
// This example uses copy-webpack-plugin to copy the prebuilt library over. This ensures that it is
2110
// available in packaged release builds.
22-
let libraryPath = path.resolve(__dirname, 'powersync', extensionPath);
11+
let libraryPath = path.resolve(__dirname, 'powersync', extensionFilename);
2312

2413
if (__dirname.indexOf('app.asar') != -1) {
2514
// Our build configuration ensures the extension is always available outside of the archive too.

packages/node/download_core.js

Lines changed: 75 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -1,49 +1,35 @@
1-
// TODO: Make this a pre-publish hook and just bundle everything
21
import { createHash } from 'node:crypto';
3-
import * as OS from 'node:os';
42
import * as fs from 'node:fs/promises';
53
import * as path from 'node:path';
64
import { Readable } from 'node:stream';
75
import { finished } from 'node:stream/promises';
8-
import { exit } from 'node:process';
96

107
// When changing this version, run node download_core.js update_hashes
118
const version = '0.4.6';
129
const versionHashes = {
1310
'powersync_x64.dll': '5efaa9ad4975094912a36843cb7b503376cacd233d21ae0956f0f4b42dcb457b',
11+
'powersync_x86.dll': '4151ba8aa6f024b50b7aebe52ba59f2c5be54e3fed26f7f3f48e1127dcda027d',
12+
'powersync_aarch64.dll': '3abe46074432593ff5cfc2098b186c592f020c5cfa81285f8e49962732a94bf5',
13+
'libpowersync_x86.so': '1321a7de13fda0b2de7d2bc231a68cb5691f84010f3858e5cf02e47f88ba6f4a',
1414
'libpowersync_x64.so': 'e9d78620d69d3cf7d57353891fe0bf85b79d326b42c4669b9500b9e610388f76',
1515
'libpowersync_aarch64.so': '0d84c0dc0134fc89af65724d11e2c45e3c15569c575ecda52d0ec2fa2aeec495',
16+
'libpowersync_armv7.so': 'c7887181ce9c524b68a7ac284ab447b8584511c87527ca26186e5874bf9ba3d6',
17+
'libpowersync_riscv64gc.so': 'a89f3a71f22f707707d97517e9310e42e2a57dc5343cee08d09002a8cea048d5',
1618
'libpowersync_x64.dylib': '9b484eaf361451f7758ca6ad53190a73563be930a8f8a39ccefd29390046ef6c',
1719
'libpowersync_aarch64.dylib': 'bfb4f1ec207b298aff560f1825f8123d24316edaa27b6df3a17dd49466576b92'
1820
};
1921

20-
const platform = OS.platform();
21-
let destination;
22-
let asset;
23-
24-
if (platform === 'win32') {
25-
asset = 'powersync_x64.dll';
26-
destination = 'powersync.dll';
27-
} else if (platform === 'linux') {
28-
asset = OS.arch() === 'x64' ? 'libpowersync_x64.so' : 'libpowersync_aarch64.so';
29-
destination = 'libpowersync.so';
30-
} else if (platform === 'darwin') {
31-
asset = OS.arch() === 'x64' ? 'libpowersync_x64.dylib' : 'libpowersync_aarch64.dylib';
32-
destination = 'libpowersync.dylib';
33-
}
34-
35-
const expectedHash = versionHashes[asset];
36-
const destinationPath = path.resolve('lib', destination);
22+
const assets = Object.keys(versionHashes);
3723

3824
const hashStream = async (input) => {
3925
for await (const chunk of input.pipe(createHash('sha256')).setEncoding('hex')) {
4026
return chunk;
4127
}
4228
};
4329

44-
const hashLocal = async () => {
30+
const hashLocal = async (filePath) => {
4531
try {
46-
const handle = await fs.open(destinationPath, 'r');
32+
const handle = await fs.open(filePath, 'r');
4733
const input = handle.createReadStream();
4834

4935
const result = await hashStream(input);
@@ -54,31 +40,88 @@ const hashLocal = async () => {
5440
}
5541
};
5642

57-
const download = async () => {
58-
if ((await hashLocal()) == expectedHash) {
59-
console.debug('Local copy is up-to-date, skipping download');
60-
exit(0);
43+
const downloadAsset = async (asset) => {
44+
const destinationPath = path.resolve('lib', asset);
45+
const expectedHash = versionHashes[asset];
46+
47+
// Check if file exists and has correct hash
48+
const currentHash = await hashLocal(destinationPath);
49+
if (currentHash == expectedHash) {
50+
console.debug(`${asset} is up-to-date, skipping download`);
51+
return;
6152
}
6253

6354
const url = `https://github.com/powersync-ja/powersync-sqlite-core/releases/download/v${version}/${asset}`;
55+
console.log(`Downloading ${url}`);
6456
const response = await fetch(url);
6557
if (response.status != 200) {
6658
throw `Could not download ${url}`;
6759
}
6860

61+
const file = await fs.open(destinationPath, 'w');
62+
await finished(Readable.fromWeb(response.body).pipe(file.createWriteStream()));
63+
await file.close();
64+
65+
const hashAfterDownloading = await hashLocal(destinationPath);
66+
if (hashAfterDownloading != expectedHash) {
67+
throw `Unexpected hash after downloading ${asset} (got ${hashAfterDownloading}, expected ${expectedHash})`;
68+
}
69+
console.log(`Successfully downloaded ${asset}`);
70+
};
71+
72+
const checkAsset = async (asset) => {
73+
const destinationPath = path.resolve('lib', asset);
74+
const expectedHash = versionHashes[asset];
75+
const currentHash = await hashLocal(destinationPath);
76+
77+
return {
78+
asset,
79+
destinationPath,
80+
expectedHash,
81+
currentHash,
82+
exists: currentHash !== null,
83+
isValid: currentHash == expectedHash
84+
};
85+
};
86+
87+
const download = async () => {
6988
try {
7089
await fs.access('lib');
7190
} catch {
7291
await fs.mkdir('lib');
7392
}
7493

75-
const file = await fs.open(destinationPath, 'w');
76-
await finished(Readable.fromWeb(response.body).pipe(file.createWriteStream()));
77-
await file.close();
94+
// First check all assets
95+
console.log('Checking existing files...');
96+
const checks = await Promise.all(assets.map((asset) => checkAsset(asset)));
7897

79-
const hashAfterDownloading = await hashLocal();
80-
if (hashAfterDownloading != expectedHash) {
81-
throw `Unexpected hash after downloading (got ${hashAfterDownloading}, expected ${expectedHash})`;
98+
const toDownload = checks.filter((check) => !check.isValid);
99+
const upToDate = checks.filter((check) => check.isValid);
100+
101+
// Print summary
102+
if (upToDate.length > 0) {
103+
console.log('\nUp-to-date files:');
104+
for (const check of upToDate) {
105+
console.log(` ✓ ${check.asset}`);
106+
}
107+
}
108+
109+
if (toDownload.length > 0) {
110+
console.log('\nFiles to download:');
111+
for (const check of toDownload) {
112+
if (!check.exists) {
113+
console.log(` • ${check.asset} (missing)`);
114+
} else {
115+
console.log(` • ${check.asset} (hash mismatch)`);
116+
}
117+
}
118+
119+
console.log('\nStarting downloads...');
120+
await Promise.all(toDownload.map((check) => downloadAsset(check.asset)));
121+
122+
console.log('\nAll downloads completed successfully!');
123+
} else {
124+
console.log('\nAll files are up-to-date, nothing to download.');
82125
}
83126
};
84127

packages/node/package.json

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,12 +13,12 @@
1313
"download_core.js"
1414
],
1515
"scripts": {
16-
"install": "node download_core.js",
17-
"build": "tsc -b && rollup --config",
18-
"build:prod": "tsc -b --sourceMap false && rollup --config",
16+
"prepare:core": "node download_core.js",
17+
"build": "pnpm prepare:core && tsc -b && rollup --config",
18+
"build:prod": "pnpm prepare:core && tsc -b --sourceMap false && rollup --config",
1919
"clean": "rm -rf lib dist tsconfig.tsbuildinfo",
2020
"watch": "tsc -b -w",
21-
"test": "vitest",
21+
"test": "pnpm prepare:core && vitest",
2222
"test:exports": "attw --pack . --ignore-rules no-resolution"
2323
},
2424
"type": "module",

packages/node/src/db/NodeSqliteWorker.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
1-
import { threadId } from 'node:worker_threads';
21
import type { DatabaseSync } from 'node:sqlite';
2+
import { threadId } from 'node:worker_threads';
33

4+
import { dynamicImport } from '../utils/modules.js';
45
import { AsyncDatabase, AsyncDatabaseOpenOptions } from './AsyncDatabase.js';
56
import { PowerSyncWorkerOptions } from './SqliteWorker.js';
6-
import { dynamicImport } from '../utils/modules.js';
77

88
class BlockingNodeDatabase implements AsyncDatabase {
99
private readonly db: DatabaseSync;
@@ -57,7 +57,7 @@ export async function openDatabase(worker: PowerSyncWorkerOptions, options: Asyn
5757
const { DatabaseSync } = await dynamicImport('node:sqlite');
5858

5959
const baseDB = new DatabaseSync(options.path, { allowExtension: true });
60-
baseDB.loadExtension(worker.extensionPath());
60+
baseDB.loadExtension(worker.extensionPath(), 'sqlite3_powersync_init');
6161

6262
return new BlockingNodeDatabase(baseDB, options.isWriter);
6363
}

0 commit comments

Comments
 (0)