Skip to content

Commit

Permalink
Update OpenAI defaults (home-assistant#118059)
Browse files · Browse the repository at this point in the history
* Update OpenAI defaults

* Update max temperature
  • [Branch information did not load — unresolved page-loading placeholder]
balloob authored May 24, 2024
1 parent ffc3560 commit 3b2cdb6
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 15 deletions.
22 changes: 11 additions & 11 deletions homeassistant/components/openai_conversation/config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,16 @@ def openai_config_option_schema(
)

return {
vol.Optional(
CONF_PROMPT,
description={"suggested_value": options.get(CONF_PROMPT)},
default=DEFAULT_PROMPT,
): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
description={"suggested_value": options.get(CONF_LLM_HASS_API)},
default="none",
): SelectSelector(SelectSelectorConfig(options=apis)),
vol.Optional(
CONF_CHAT_MODEL,
description={
Expand All @@ -153,16 +163,6 @@ def openai_config_option_schema(
},
default=DEFAULT_CHAT_MODEL,
): str,
vol.Optional(
CONF_LLM_HASS_API,
description={"suggested_value": options.get(CONF_LLM_HASS_API)},
default="none",
): SelectSelector(SelectSelectorConfig(options=apis)),
vol.Optional(
CONF_PROMPT,
description={"suggested_value": options.get(CONF_PROMPT)},
default=DEFAULT_PROMPT,
): TemplateSelector(),
vol.Optional(
CONF_MAX_TOKENS,
description={"suggested_value": options.get(CONF_MAX_TOKENS)},
Expand All @@ -177,5 +177,5 @@ def openai_config_option_schema(
CONF_TEMPERATURE,
description={"suggested_value": options.get(CONF_TEMPERATURE)},
default=DEFAULT_TEMPERATURE,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
): NumberSelector(NumberSelectorConfig(min=0, max=2, step=0.05)),
}
6 changes: 3 additions & 3 deletions homeassistant/components/openai_conversation/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,10 @@
{%- endfor %}
"""
CONF_CHAT_MODEL = "chat_model"
DEFAULT_CHAT_MODEL = "gpt-3.5-turbo"
DEFAULT_CHAT_MODEL = "gpt-4o"
CONF_MAX_TOKENS = "max_tokens"
DEFAULT_MAX_TOKENS = 150
CONF_TOP_P = "top_p"
DEFAULT_TOP_P = 1
DEFAULT_TOP_P = 1.0
CONF_TEMPERATURE = "temperature"
DEFAULT_TEMPERATURE = 0.5
DEFAULT_TEMPERATURE = 1.0
5 changes: 4 additions & 1 deletion homeassistant/components/openai_conversation/strings.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,15 @@
"step": {
"init": {
"data": {
"prompt": "Prompt Template",
"prompt": "Instructions",
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "Maximum tokens to return in response",
"temperature": "Temperature",
"top_p": "Top P",
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
}
}
}
Expand Down

0 comments on commit 3b2cdb6

Please sign in to comment.