
Commit 64d51a0

remove old precision settings
blaisewf committed Jan 7, 2025
1 parent 837b945 commit 64d51a0
Showing 7 changed files with 1 addition and 82 deletions.
1 change: 0 additions & 1 deletion rvc/configs/32000.json
@@ -5,7 +5,6 @@
 "learning_rate": 1e-4,
 "betas": [0.8, 0.99],
 "eps": 1e-9,
-"fp16_run": true,
 "lr_decay": 0.999875,
 "segment_size": 12800,
 "c_mel": 45,
1 change: 0 additions & 1 deletion rvc/configs/40000.json
@@ -5,7 +5,6 @@
 "learning_rate": 1e-4,
 "betas": [0.8, 0.99],
 "eps": 1e-9,
-"fp16_run": true,
 "lr_decay": 0.999875,
 "segment_size": 12800,
 "c_mel": 45,
1 change: 0 additions & 1 deletion rvc/configs/44100.json
@@ -5,7 +5,6 @@
 "learning_rate": 0.0001,
 "betas": [0.8, 0.99],
 "eps": 1e-09,
-"fp16_run": true,
 "lr_decay": 0.999875,
 "segment_size": 15876,
 "c_mel": 45,
1 change: 0 additions & 1 deletion rvc/configs/48000.json
@@ -5,7 +5,6 @@
 "learning_rate": 1e-4,
 "betas": [0.8, 0.99],
 "eps": 1e-9,
-"fp16_run": true,
 "lr_decay": 0.999875,
 "segment_size": 17280,
 "c_mel": 45,
34 changes: 0 additions & 34 deletions rvc/configs/config.py
@@ -42,40 +42,6 @@ def load_config_json(self):
                 configs[config_file] = json.load(f)
         return configs
 
-    def set_precision(self, precision):
-        if precision not in ["fp32", "fp16"]:
-            raise ValueError("Invalid precision type. Must be 'fp32' or 'fp16'.")
-
-        fp16_run_value = precision == "fp16"
-
-        for config_path in version_config_paths:
-            full_config_path = os.path.join("rvc", "configs", config_path)
-            try:
-                with open(full_config_path, "r") as f:
-                    config = json.load(f)
-                config["train"]["fp16_run"] = fp16_run_value
-                with open(full_config_path, "w") as f:
-                    json.dump(config, f, indent=4)
-            except FileNotFoundError:
-                print(f"File not found: {full_config_path}")
-
-        return f"Overwritten config to use {precision}."
-
-    def get_precision(self):
-        if not version_config_paths:
-            raise FileNotFoundError("No configuration paths provided.")
-
-        full_config_path = os.path.join("rvc", "configs", version_config_paths[0])
-        try:
-            with open(full_config_path, "r") as f:
-                config = json.load(f)
-            fp16_run_value = config["train"].get("fp16_run", False)
-            precision = "fp16" if fp16_run_value else "fp32"
-            return precision
-        except FileNotFoundError:
-            print(f"File not found: {full_config_path}")
-            return None
-
     def device_config(self):
         if self.device.startswith("cuda"):
             self.set_cuda_config()
41 changes: 0 additions & 41 deletions tabs/settings/sections/precision.py

This file was deleted.

4 changes: 1 addition & 3 deletions tabs/settings/settings.py
@@ -6,7 +6,6 @@
 sys.path.append(now_dir)
 
 from tabs.settings.sections.presence import presence_tab
-from tabs.settings.sections.precision import precision_tab
 from tabs.settings.sections.themes import theme_tab
 from tabs.settings.sections.version import version_tab
 from tabs.settings.sections.lang import lang_tab
@@ -21,6 +20,5 @@ def settings_tab():
         version_tab()
         lang_tab()
         restart_tab()
-    with gr.TabItem(label="Inference & Training"):
-        precision_tab()
+    with gr.TabItem(label="Training"):
         model_author_tab()
