Update OpenAI defaults (#118059)

* Update OpenAI defaults

* Update max temperature
Paulus Schoutsen, 2024-05-24 15:37:44 -04:00 (committed by GitHub)
parent ffc3560dad
commit 3b2cdb63f1
3 changed files with 18 additions and 15 deletions

homeassistant/components/openai_conversation/config_flow.py

@@ -145,6 +145,16 @@ def openai_config_option_schema(
         )
     )
     return {
+        vol.Optional(
+            CONF_PROMPT,
+            description={"suggested_value": options.get(CONF_PROMPT)},
+            default=DEFAULT_PROMPT,
+        ): TemplateSelector(),
+        vol.Optional(
+            CONF_LLM_HASS_API,
+            description={"suggested_value": options.get(CONF_LLM_HASS_API)},
+            default="none",
+        ): SelectSelector(SelectSelectorConfig(options=apis)),
         vol.Optional(
             CONF_CHAT_MODEL,
             description={
@@ -153,16 +163,6 @@ def openai_config_option_schema(
             },
             default=DEFAULT_CHAT_MODEL,
         ): str,
-        vol.Optional(
-            CONF_LLM_HASS_API,
-            description={"suggested_value": options.get(CONF_LLM_HASS_API)},
-            default="none",
-        ): SelectSelector(SelectSelectorConfig(options=apis)),
-        vol.Optional(
-            CONF_PROMPT,
-            description={"suggested_value": options.get(CONF_PROMPT)},
-            default=DEFAULT_PROMPT,
-        ): TemplateSelector(),
         vol.Optional(
             CONF_MAX_TOKENS,
             description={"suggested_value": options.get(CONF_MAX_TOKENS)},
@@ -177,5 +177,5 @@ def openai_config_option_schema(
             CONF_TEMPERATURE,
             description={"suggested_value": options.get(CONF_TEMPERATURE)},
             default=DEFAULT_TEMPERATURE,
-        ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
+        ): NumberSelector(NumberSelectorConfig(min=0, max=2, step=0.05)),
     }
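
The practical effect of the schema change above: the prompt and LLM API selectors now lead the options form, and the temperature field accepts values up to 2 instead of 1. A minimal sketch of how vol.Optional defaults behave, using plain voluptuous so it runs standalone; the float Range stand-in is an assumption for illustration, not the integration's NumberSelector:

# Minimal sketch, assuming plain voluptuous; Home Assistant's selector
# classes are replaced with a simple float range so this runs standalone.
import voluptuous as vol

CONF_TEMPERATURE = "temperature"
DEFAULT_TEMPERATURE = 1.0  # new default from this commit

schema = vol.Schema(
    {
        # vol.Optional(..., default=...) fills the key in when it is absent,
        # which is how the options form falls back to the new defaults.
        vol.Optional(CONF_TEMPERATURE, default=DEFAULT_TEMPERATURE): vol.All(
            vol.Coerce(float), vol.Range(min=0, max=2)  # max raised from 1 to 2
        ),
    }
)

print(schema({}))                       # {'temperature': 1.0}
print(schema({CONF_TEMPERATURE: 1.5}))  # {'temperature': 1.5}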

homeassistant/components/openai_conversation/const.py

@@ -23,10 +23,10 @@ An overview of the areas and the devices in this smart home:
 {%- endfor %}
 """
 CONF_CHAT_MODEL = "chat_model"
-DEFAULT_CHAT_MODEL = "gpt-3.5-turbo"
+DEFAULT_CHAT_MODEL = "gpt-4o"
 CONF_MAX_TOKENS = "max_tokens"
 DEFAULT_MAX_TOKENS = 150
 CONF_TOP_P = "top_p"
-DEFAULT_TOP_P = 1
+DEFAULT_TOP_P = 1.0
 CONF_TEMPERATURE = "temperature"
-DEFAULT_TEMPERATURE = 0.5
+DEFAULT_TEMPERATURE = 1.0
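
These constants are what the integration falls back to when an option is unset. A hedged sketch of how they might map onto an OpenAI chat request (openai-python v1 style; the helper function is illustrative, not this integration's actual call site):

# Illustrative only: maps the new defaults onto an OpenAI chat completion
# request with the openai-python v1 client; not code from this commit.
from openai import OpenAI

DEFAULT_CHAT_MODEL = "gpt-4o"
DEFAULT_MAX_TOKENS = 150
DEFAULT_TOP_P = 1.0
DEFAULT_TEMPERATURE = 1.0

def complete(client: OpenAI, prompt: str, options: dict | None = None) -> str:
    """Send one chat completion, falling back to the new defaults."""
    options = options or {}
    response = client.chat.completions.create(
        model=options.get("chat_model", DEFAULT_CHAT_MODEL),
        max_tokens=options.get("max_tokens", DEFAULT_MAX_TOKENS),
        top_p=options.get("top_p", DEFAULT_TOP_P),
        temperature=options.get("temperature", DEFAULT_TEMPERATURE),
        messages=[{"role": "user", "content": prompt}],
    )
    return response.choices[0].message.content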

homeassistant/components/openai_conversation/strings.json

@@ -17,12 +17,15 @@
     "step": {
       "init": {
         "data": {
-          "prompt": "Prompt Template",
+          "prompt": "Instructions",
           "chat_model": "[%key:common::generic::model%]",
           "max_tokens": "Maximum tokens to return in response",
           "temperature": "Temperature",
           "top_p": "Top P",
           "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]"
+        },
+        "data_description": {
+          "prompt": "Instruct how the LLM should respond. This can be a template."
         }
       }
     }
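
For reference, the "[%key:...]" values point at Home Assistant's shared translation strings, and the new "data_description" block adds per-field helper text under the form inputs. A small illustrative check that description keys line up with form fields; the enclosing "options" key is assumed from Home Assistant's strings.json layout and is not shown in this hunk:

# Sanity-check sketch (illustrative): keys under "data_description" should
# be a subset of the keys under "data".
import json

strings = json.loads("""\
{
  "options": {
    "step": {
      "init": {
        "data": {"prompt": "Instructions"},
        "data_description": {
          "prompt": "Instruct how the LLM should respond. This can be a template."
        }
      }
    }
  }
}
""")

init = strings["options"]["step"]["init"]
assert set(init["data_description"]) <= set(init["data"])
print("data_description keys match data fields")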