Commit
Merge pull request #85 from andzejsp/main
Disable SpeechSynth for Firefox RIP
ParisNeo authored Apr 14, 2023
2 parents ce59ffe + 44dbcdc commit 009927a
Showing 4 changed files with 455 additions and 443 deletions.
4 changes: 2 additions & 2 deletions personalities/gpt4all_chatbot.yaml
@@ -27,10 +27,10 @@ personality_conditionning: |
 welcome_message: "Welcome! I am GPT4All A free and open discussion AI. What can I do for you today?"

 # This prefix is added at the beginning of any message input by the user
-message_prefix: "\nuser: "
+message_prefix: "\nuser:"

 # This suffix is added at the end of any message input by the user
-message_suffix: "\ngpt4all: "
+message_suffix: "\ngpt4all:"

 # Here is the list of extensions this personality requires
 dependencies: []
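These prefix and suffix strings are wrapped around each user message when the chatbot assembles its prompt, so dropping the trailing spaces means the prompt now ends exactly at the colon. Below is a minimal sketch of that assembly, assuming a plain string-concatenation backend; buildPrompt and its arguments are illustrative names, not the project's actual API.

    // Hedged sketch, not taken from this repository.
    // "conditionning" stands in for the personality_conditionning text from the same YAML file.
    function buildPrompt(conditionning, userText) {
        const message_prefix = "\nuser:";     // value after this change (no trailing space)
        const message_suffix = "\ngpt4all:";  // the model is expected to continue right after the colon
        return conditionning + message_prefix + userText + message_suffix;
    }

    console.log(buildPrompt("A friendly assistant.", "Hello!"));
    // returns: "A friendly assistant.\nuser:Hello!\ngpt4all:"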
243 changes: 123 additions & 120 deletions static/js/audio.js
@@ -1,143 +1,145 @@
// Dirty fix for disabling speech synth for firefox browsers :()
if (!userAgent.match(/firefox|fxios/i)) {
    isStarted = false;
    isSpeaking = false;
    const SpeechRecognition = window.SpeechRecognition || webkitSpeechRecognition;
    const recognition = new SpeechRecognition();
    const synth = window.speechSynthesis || webkitspeechSynthesis;
    var voices = synth.getVoices();
    function prepre_audio() {
        recognition.continuous = true;
        recognition.interimResults = true;
        recognition.maxAlternatives = 10;
        language_select = document.getElementById("language")
    }
    voices = [];
    function populateVoicesList() {
        voices = synth.getVoices();
        voice_select = document.getElementById("voice")
        voice_select.innerHTML = "";
        for (let i = 0; i < voices.length; i++) {
            if (
                voices[i].lang.startsWith(
                    language_select.value.substring(0, 2)
                )
            ) {
                const option = document.createElement("option");
                option.textContent = `${voices[i].name} (${voices[i].lang})`;

                if (voices[i].default) {
                    option.textContent += " — DEFAULT";
                }

                option.setAttribute("data-lang", voices[i].lang);
                option.setAttribute("data-name", voices[i].name);
                voice_select.appendChild(option);
            }
        }
        voice_select.addEventListener("change", function () {
        });
    }
    // Audio code
    function splitString(string, maxLength) {
        const sentences = string.match(/[^.!?]+[.!?]/g);
        const strings = [];
        let currentString = "";

        if (sentences) {
            for (const sentence of sentences) {
                if (currentString.length + sentence.length > maxLength) {
                    strings.push(currentString);
                    currentString = "";
                }

                currentString += `${sentence} `;
            }
        } else {
            strings.push(string);
        }

        if (currentString) {
            strings.push(currentString);
        }

        return strings;
    }
    function addListeners(button, utterThis) {
        utterThis.onstart = (event) => {
            isSpeaking = true;
            button.style.backgroundColor = "red";
            button.style.boxShadow = "2px 2px 0.5px #808080";
        };

        utterThis.onend = (event) => {
            isSpeaking = false;
            button.style.backgroundColor = "";
            button.style.boxShadow = "";
        };
    }
    function attachAudio_modules(div) {
        if (div.parentNode.getElementsByClassName("audio-out-button").length > 0) {
            return;
        }
        const audio_out_button = document.createElement("button");
        audio_out_button.id = "audio-out-button";
        audio_out_button.classList.add("audio_btn");
        audio_out_button.innerHTML = "🕪";
        div.classList.add("flex-1");
        audio_out_button.classList.add("audio-out-button");
        div.appendChild(audio_out_button);

        function play_audio() {
            if (isSpeaking) {
                audio_out_button.style.backgroundColor = "";
                audio_out_button.style.boxShadow = "";
                synth.cancel();
                isSpeaking = false;
            } else {
                isSpeaking = true;
                text = audio_out_button.previousSibling.textContent;

                const selectedOption =
                    voice_select.selectedOptions[0].getAttribute("data-name");
                var selectedVoice = null;
                for (let i = 0; i < voices.length; i++) {
                    if (voices[i].name === selectedOption) {
                        selectedVoice = voices[i];
                    }
                }
                if (selectedVoice && selectedVoice.voiceURI === "native") {
                    const utterThis = new SpeechSynthesisUtterance(text);
                    utterThis.voice = selectedVoice;
                    addListeners(audio_out_button, utterThis);
                    synth.speak(utterThis);
                } else {
                    texts = splitString(text, 200);
                    texts.forEach((text) => {
                        const utterThis = new SpeechSynthesisUtterance(text);
                        utterThis.voice = selectedVoice;
                        addListeners(audio_out_button, utterThis);
                        synth.speak(utterThis);
                    });
                }
            }
        }
        audio_out_button.addEventListener("click", () => {
            play_audio();
        });
        // TODO : activate using configuration file
        //if (global["auto_audio"]) {
        //    play_audio();
        //}
    }

    function add_audio_in_ui() {
        const inputs = document.querySelectorAll("#user-input");
        inputs.forEach((input) => {
            // const wrapper = document.createElement("div");
            // wrapper.classList.add("flex", "items-center");
            var btn = document.querySelectorAll("#audio_in_tool");

            var found = false;
            // Iterate through the children
            for (var i = 0; i < btn.length; i++) {
@@ -147,25 +149,25 @@ function add_audio_in_ui() {
                    found = true;
                }
            }

            if (!found) {
                const audio_in_button = document.createElement("button");
                audio_in_button.id = "audio_in_tool";
                audio_in_button.classList.add("audio_btn");
                audio_in_button.innerHTML = "🎤";

                input.parentNode.parentNode.appendChild(
                    audio_in_button
                );

                input.classList.add("flex-1");
                audio_in_button.classList.add("ml-2");
                //wrapper.appendChild(audio_in_button);
                //input.parentNode.parentNode.insertBefore(wrapper, input);
                //input.parentNode.removeChild(input);
                //wrapper.appendChild(input);

                audio_in_button.addEventListener("click", () => {
                    if (isStarted) {
                        recognition.stop();
@@ -176,7 +178,7 @@ function add_audio_in_ui() {
                        isStarted = true;
                    }
                });

                recognition.addEventListener("result", (event) => {
                    let transcript = "";
                    for (const result of event.results) {
@@ -186,16 +188,17 @@ function add_audio_in_ui() {
                        input.value = transcript;
                    }
                });

                recognition.addEventListener("start", () => {
                    audio_in_button.style.backgroundColor = "red";
                    audio_in_button.style.boxShadow = "2px 2px 0.5px #808080";
                });

                recognition.addEventListener("end", () => {
                    audio_in_button.style.backgroundColor = "";
                    audio_in_button.style.boxShadow = "";
                });
            }
        });
    }
}
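The guard added above relies on user-agent sniffing rather than feature detection, and it tests a bare userAgent variable that is presumably defined elsewhere in the page (for example as navigator.userAgent); that assumption, and the UA strings below, are illustrative only. A small sketch of how the regex behaves:

    // Hedged sketch: example UA strings, not taken from this repository.
    const userAgent_firefox = "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/112.0";
    const userAgent_chrome = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36";

    // /firefox|fxios/i also matches Firefox on iOS, whose UA token is "FxiOS".
    console.log(!userAgent_firefox.match(/firefox|fxios/i)); // false: the speech code is skipped
    console.log(!userAgent_chrome.match(/firefox|fxios/i));  // true: the speech code runs

Because the whole file now sits inside this block, prepre_audio, attachAudio_modules and add_audio_in_ui are only defined on non-Firefox browsers, so code that calls them presumably has to tolerate their absence on Firefox.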

0 comments on commit 009927a
