Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Search for secret key in mod parent folder #319

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/config/config_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ def __init__(self, file_name='config.ini'):
config_value.parse(each_value)
except:
create_back_up_configini = True
# TODO: filter out warnings for ['game', 'skyrim_mod_folder', 'skyrimvr_mod_folder', 'fallout4_mod_folder', 'fallout4vr_mod_folder', 'fallout4vr_folder']
logging.warning(f"Could not identify config value '{each_key} = {each_value}' in current config.ini. Value will not be loaded. A backup of this config.ini will be created.")

if create_back_up_configini:
Expand Down
13 changes: 10 additions & 3 deletions src/llm/openai_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@
from src.llm.message_thread import message_thread
from src.llm.messages import message
from src.config.config_loader import ConfigLoader
import os
import sys
from pathlib import Path

class openai_client:
"""Joint setup for sync and async access to the LLMs
Expand Down Expand Up @@ -64,8 +65,14 @@ def auto_resolve_endpoint(model_name, endpoints):
if (endpoint == 'none') or ("https" in endpoint):
#cloud LLM
self.__is_local: bool = False
with open(secret_key_file, 'r') as f:
self.__api_key: str = f.readline().strip()

try: # first check mod folder for secret key
mod_parent_folder = str(Path(utils.resolve_path()).parent.parent.parent)
with open(mod_parent_folder+'\\'+secret_key_file, 'r') as f:
self.__api_key: str = f.readline().strip()
except: # check locally (same folder as exe) for secret key
with open(secret_key_file, 'r') as f:
self.__api_key: str = f.readline().strip()

if not self.__api_key:
game_installation_page = 'https://art-from-the-machine.github.io/Mantella/pages/installation.html#language-models-llms'
Expand Down
8 changes: 6 additions & 2 deletions src/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,8 +91,12 @@ def get_language_info(file_name) -> dict[Hashable, str]:
setup_logging(save_folder+logging_file)
config = config_loader.ConfigLoader(save_folder+config_file)
config.save_folder = save_folder
logging.log(23, f'Mantella.exe running in {os.getcwd()}. config.ini, logging.log, and conversation histories available in {save_folder}.')
logging.log(23, f'Mantella currently running for {config.game}. Mantella mod files located in {config.mod_path}.')
logging.log(23, f'''Mantella.exe running in:
{os.getcwd()}
config.ini, logging.log, and conversation histories available in:
{save_folder}''')
logging.log(23, f'''Mantella currently running for {config.game}. Mantella mod files located in:
{config.mod_path}''')
if not config.have_all_config_values_loaded_correctly:
logging.error("Cannot start Mantella. Not all settings that are required are set to correct values. Please check the above error messages and correct the corresponding settings!")

Expand Down
20 changes: 13 additions & 7 deletions src/stt.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import requests
import json
import io
from pathlib import Path

class Transcriber:
def __init__(self, config, secret_key_file: str):
Expand Down Expand Up @@ -58,17 +59,22 @@ def __init__(self, config, secret_key_file: str):
self.transcribe_model = WhisperModel(self.model, device=self.process_device, compute_type="float32")

def __get_api_key(self) -> str:
    """Return the API key, lazily loading it from the secret key file on first call.

    Lookup order (mirrors openai_client):
      1. The mod's parent folder (three levels above the resolved exe path).
      2. The local folder (same folder as the exe), as a fallback.

    Exits the process with an explanatory error message if no key is found.

    Returns:
        str: the API key read from the first line of the secret key file.
    """
    if not self.__api_key:
        try:
            # First check the mod's parent folder for the secret key file.
            # NOTE(review): assumes resolve_path() is three levels below the
            # mod parent folder — confirm against the mod's install layout.
            mod_parent_folder = str(Path(utils.resolve_path()).parent.parent.parent)
            with open(mod_parent_folder + '\\' + self.__secret_key_file, 'r') as f:
                self.__api_key: str = f.readline().strip()
        except OSError:
            # File missing/unreadable in the mod folder: fall back to checking
            # locally (same folder as the exe) for the secret key.
            with open(self.__secret_key_file, 'r') as f:
                self.__api_key: str = f.readline().strip()

        if not self.__api_key:
            logging.error(f'''No secret key found in MantellaSoftware/GPT_SECRET_KEY.txt. Please create a secret key and paste it in your Mantella mod folder's SKSE/Plugins/MantellaSoftware/GPT_SECRET_KEY.txt file.
If you are using OpenRouter (default), you can create a secret key in Account -> Keys once you have created an account: https://openrouter.ai/
If using OpenAI, see here on how to create a secret key: https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
If you are running a model locally, please ensure the service (Kobold / Text generation web UI) is running.''')
            input("Press Enter to continue.")
            sys.exit(0)
    return self.__api_key

# def get_player_response(self, say_goodbye, prompt: str):
Expand Down