Use gemini-1.5-flash-latest in google_generative_ai_conversation.generate_content (#118594)
parent 7af469f81e
commit a4612143e6
2 changed files with 3 additions and 4 deletions
@@ -66,8 +66,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
                 }
             )

-        model_name = "gemini-pro-vision" if image_filenames else "gemini-pro"
-        model = genai.GenerativeModel(model_name=model_name)
+        model = genai.GenerativeModel(model_name=RECOMMENDED_CHAT_MODEL)

         try:
             response = await model.generate_content_async(prompt_parts)
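For context, the changed call above can be exercised on its own. The following is a minimal standalone sketch, assuming the public google-generativeai package and an already-configured API key; RECOMMENDED_CHAT_MODEL is the integration's own constant and is hard-coded here with the value the updated test snapshots below suggest ("models/gemini-1.5-flash-latest").

# Minimal sketch, not the integration's actual module.
import asyncio

import google.generativeai as genai

# Assumed value, inferred from the updated test snapshots below.
RECOMMENDED_CHAT_MODEL = "models/gemini-1.5-flash-latest"


async def generate(prompt_parts: list) -> str:
    # genai.configure(api_key=...) must already have been called.
    model = genai.GenerativeModel(model_name=RECOMMENDED_CHAT_MODEL)
    response = await model.generate_content_async(prompt_parts)
    return response.text


if __name__ == "__main__":
    print(asyncio.run(generate(["Write a short haiku about home automation"])))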
@@ -6,7 +6,7 @@
       tuple(
       ),
       dict({
-        'model_name': 'gemini-pro-vision',
+        'model_name': 'models/gemini-1.5-flash-latest',
       }),
     ),
     tuple(
@@ -32,7 +32,7 @@
       tuple(
       ),
       dict({
-        'model_name': 'gemini-pro',
+        'model_name': 'models/gemini-1.5-flash-latest',
       }),
     ),
     tuple(
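The two hunks above update a serialized test snapshot: the tuple(...)/dict({...}) entries are how recorded mock calls are dumped to the fixture file, so pointing the service at a single model changes the expected 'model_name' keyword in both recorded calls. Below is a hedged sketch of how such a snapshot assertion is typically written with pytest, unittest.mock, and the syrupy snapshot plugin; the test name and mock wiring are illustrative, not the integration's actual test.

# Illustrative only: a snapshot test whose stored fixture would contain
# tuple(...)/dict({...}) entries like the ones in the hunks above.
from unittest.mock import MagicMock


def test_model_name_is_snapshotted(snapshot) -> None:
    # `snapshot` is the fixture provided by the syrupy pytest plugin.
    genai = MagicMock()
    genai.GenerativeModel(model_name="models/gemini-1.5-flash-latest")
    # Each recorded call serializes as nested tuple(...)/dict({...}) blocks;
    # the kwargs dict is where 'model_name' appears in the snapshot file.
    assert genai.mock_calls == snapshot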