Skip to content

Commit

Permalink
WebGPU backend added
Browse files Browse the repository at this point in the history
minor optimisation to model normalisation
  • Loading branch information
Mattk70 committed Nov 4, 2023
1 parent 68471fd commit 74bab9d
Show file tree
Hide file tree
Showing 5 changed files with 125 additions and 3 deletions.
7 changes: 7 additions & 0 deletions index.html
Original file line number Diff line number Diff line change
Expand Up @@ -433,6 +433,13 @@ <h5>Saved Records</h5>
GPU
</label>
</div>
<div class="form-check">
    <!-- id must be "webgpu" (not "wasm"): the label's for-attribute targets it,
         and "wasm" would duplicate the existing WASM option's id -->
    <input class="form-check-input" type="radio"
           name="backend" id="webgpu" value="webgpu">
    <label class="form-check-label" for="webgpu">
        WebGPU
    </label>
</div>
</div>
<div class="col"><span class="circle"
title="The GPU backend may speed up processing if you have a dedicated graphics card.">?</span>
Expand Down
5 changes: 3 additions & 2 deletions js/model.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
const tf = require('@tensorflow/tfjs-node');
require('@tensorflow/tfjs-backend-webgpu');
const fs = require('fs');
const path = require('path');
let DEBUG = false;
Expand Down Expand Up @@ -213,9 +214,9 @@ class Model {

normalise(spec) {
return tf.tidy(() => {
const spec_max = tf.max(spec, [1, 2]).reshape([-1, 1, 1, 1])
const spec_max = tf.max(spec, [1, 2], true)
if (this.version === 'v4'){
const spec_min = tf.min(spec, [1, 2]).reshape([-1, 1, 1, 1])
const spec_min = tf.min(spec, [1, 2], true)
spec = tf.sub(spec, spec_min).div(tf.sub(spec_max, spec_min));
} else {
spec = spec.mul(255);
Expand Down
113 changes: 113 additions & 0 deletions js/spectrogram.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
const tf = require('@tensorflow/tfjs-node');
const DEBUG = false;
/**
 * Custom tf.layers.Layer that normalises batches of spectrograms.
 * 'v4' models get per-spectrogram min-max scaling to [0, 1]; older
 * models get values scaled so each spectrogram's peak maps to 255.
 */
class PreprocessSpectrogramLayer extends tf.layers.Layer {
  /**
   * @param {{imgHeight: number, imgWidth: number, version: string}} config
   *   Layer config; imgHeight/imgWidth are retained but unused here —
   *   presumably consumed by downstream layers. TODO confirm.
   */
  constructor(config) {
    super(config);
    const { imgHeight, imgWidth, version } = config;
    this.imgHeight = imgHeight;
    this.imgWidth = imgWidth;
    this.version = version;
  }

  /**
   * Normalise a batch of spectrograms in-graph.
   * Reductions over axes [1, 2] with keepDims=true give one min/max
   * per batch element, broadcast back over the spectrogram.
   * @param {tf.Tensor} inputs - batched spectrogram tensor
   * @returns {tf.Tensor} normalised tensor, same shape as inputs
   */
  call(inputs) {
    return tf.tidy(() => {
      const batchMax = tf.max(inputs, [1, 2], true);
      if (this.version !== 'v4') {
        // Legacy path: scale so each spectrogram's peak becomes 255.
        return tf.mul(inputs, 255).div(batchMax);
      }
      // v4 path: classic min-max normalisation to [0, 1].
      const batchMin = tf.min(inputs, [1, 2], true);
      return tf.sub(inputs, batchMin).div(tf.sub(batchMax, batchMin));
    });
  }

  // Declare the expected 4-D input spec (batch, height, width, channels).
  build(inputShape) {
    this.inputSpec = [{ shape: [null, inputShape[1], inputShape[2], inputShape[3]] }];
    return this;
  }

  // Registered class name used by tfjs (de)serialisation.
  static get className() {
    return 'PreprocessSpectrogramLayer';
  }
}
// Module-level singleton, created when the 'load' message arrives.
let preprocessLayer;

// Samples per analysis chunk — presumably 3 s of audio at 24 kHz; TODO confirm.
const CHUNK_SAMPLES = 72000;

/**
 * Worker message handler.
 * 'load'   → initialise the TF backend and build the preprocessing layer.
 * (other)  → treat payload as raw audio, pad/split it into fixed-size
 *            chunks, build a spectrogram batch and post it back.
 */
onmessage = async (e) => {
    const message = e.data.message;

    if (message === 'load') {
        const backend = e.data.backend;
        tf.setBackend(backend).then(async () => {
            if (backend === 'webgl') {
                // WebGL tuning: half-precision textures + packed ops.
                tf.env().set('WEBGL_FORCE_F16_TEXTURES', true);
                tf.env().set('WEBGL_PACK', true);
                tf.env().set('WEBGL_EXP_CONV', true);
                tf.env().set('TOPK_K_CPU_HANDOFF_THRESHOLD', 128);
                tf.env().set('TOPK_LAST_DIM_CPU_HANDOFF_SIZE_THRESHOLD', 0);
            }
            tf.enableProdMode();
            if (DEBUG) {
                console.log(tf.env());
                console.log(tf.env().getFlags());
            }
            const config = e.data.config;
            preprocessLayer = new PreprocessSpectrogramLayer(config);
            console.log('Layer loaded');
        }).catch((err) => {
            // Don't leave backend-init failures as unhandled rejections.
            console.error(`Failed to initialise backend '${backend}':`, err);
        });

    } else {
        let { audio, start, fileStart, file, snr, worker, threshold, confidence } = e.data.payload;
        if (DEBUG) console.log('predictChunk begin', tf.memory().numTensors);
        audio = tf.tensor1d(audio);

        // tensor.shape is an array; index it explicitly rather than relying
        // on JS coercing [n] % 72000 to a number (the original did).
        const remainder = audio.shape[0] % CHUNK_SAMPLES;
        let paddedBuffer;
        if (remainder !== 0) {
            // Zero-pad the trailing partial chunk up to a full chunk length.
            paddedBuffer = audio.pad([[0, CHUNK_SAMPLES - remainder]]);
            audio.dispose();
            if (DEBUG) console.log('Received final chunks');
        }
        const buffer = paddedBuffer || audio;
        const numSamples = buffer.shape[0] / CHUNK_SAMPLES;
        const bufferList = tf.split(buffer, numSamples);
        buffer.dispose();

        const specBatch = makeSpectrogramBatch(bufferList);
        // The split tensors were created outside any tf.tidy scope, so they
        // must be disposed explicitly or they leak GPU/heap memory.
        bufferList.forEach((chunk) => chunk.dispose());

        // Key each spectrogram by its start offset (in samples) in the file.
        const batchKeys = [...Array(numSamples).keys()].map(i => start + CHUNK_SAMPLES * i);
        postMessage({
            message: 'specs',
            specBatch: specBatch.arraySync(),
            batchKeys: batchKeys,
            threshold: threshold,
            confidence: confidence,
            file: file,
            fileStart: fileStart,
            worker: worker
        });
        specBatch.dispose();
    }
};

/**
 * Turn a batch of equal-length 1-D audio tensors into a stacked batch of
 * magnitude spectrograms (STFT with frame length 512, hop 186).
 * Intermediate tensors are reclaimed by tf.tidy; only the stacked result
 * survives.
 * @param {tf.Tensor1D[]} signalBatch - audio chunks of identical length
 * @returns {tf.Tensor} stacked magnitude spectrograms, one per input signal
 */
function makeSpectrogramBatch(signalBatch) {
    return tf.tidy(() => {
        const magnitudes = signalBatch.map(
            (signal) => tf.abs(tf.signal.stft(signal, 512, 186))
        );
        return tf.stack(magnitudes);
    });
}
1 change: 1 addition & 0 deletions js/ui.js
Original file line number Diff line number Diff line change
Expand Up @@ -1327,6 +1327,7 @@ window.onload = async () => {
warmup: true,
backend: 'tensorflow',
tensorflow: { threads: diagnostics['Cores'], batchSize: 32 },
webgpu: { threads: 2, batchSize: 32 },
webgl: { threads: 2, batchSize: 32 },
audio: { format: 'mp3', bitrate: 192, quality: 5, downmix: false, padding: false, fade: false },
limit: 500,
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,6 @@
"homepage": "https://github.com/mattk70/Chirpity-Electron#readme",
"devDependencies": {
"@playwright/test": "^1.39.0",
"@tensorflow/tfjs-converter": "4.10.0",
"electron": "^25.3.0",
"electron-builder": "24.6.4",
"electron-playwright-helpers": "^1.6.0",
Expand All @@ -186,6 +185,7 @@
"dependencies": {
"@fast-csv/format": "^4.3.5",
"@popperjs/core": "^2.9.2",
"@tensorflow/tfjs-backend-webgpu": "^4.12.0",
"@tensorflow/tfjs-node": "^4.12.0",
"axios": "1.5.1",
"bootstrap": "5.2.2",
Expand Down

0 comments on commit 74bab9d

Please sign in to comment.