diff --git a/homeassistant/components/ollama/const.py b/homeassistant/components/ollama/const.py
index e804ccedd85..b3bce3624c2 100644
--- a/homeassistant/components/ollama/const.py
+++ b/homeassistant/components/ollama/const.py
@@ -86,41 +86,60 @@ MAX_HISTORY_SECONDS = 60 * 60  # 1 hour
 MODEL_NAMES = [  # https://ollama.com/library
     "alfred",
     "all-minilm",
+    "aya",
     "bakllava",
     "codebooga",
+    "codegeex4",
     "codegemma",
     "codellama",
     "codeqwen",
+    "codestral",
     "codeup",
     "command-r",
     "command-r-plus",
     "dbrx",
     "deepseek-coder",
+    "deepseek-coder-v2",
     "deepseek-llm",
+    "deepseek-v2",
+    "dolphincoder",
     "dolphin-llama3",
     "dolphin-mistral",
     "dolphin-mixtral",
     "dolphin-phi",
-    "dolphincoder",
     "duckdb-nsql",
     "everythinglm",
     "falcon",
+    "falcon2",
+    "firefunction-v2",
     "gemma",
+    "gemma2",
+    "glm4",
     "goliath",
-    "llama-pro",
+    "granite-code",
+    "internlm2",
     "llama2",
     "llama2-chinese",
     "llama2-uncensored",
     "llama3",
+    "llama3-chatqa",
+    "llama3-gradient",
+    "llama3-groq-tool-use",
+    "llama-pro",
     "llava",
+    "llava-llama3",
+    "llava-phi3",
     "magicoder",
+    "mathstral",
     "meditron",
     "medllama2",
     "megadolphin",
     "mistral",
-    "mistral-openorca",
     "mistrallite",
+    "mistral-nemo",
+    "mistral-openorca",
     "mixtral",
+    "moondream",
     "mxbai-embed-large",
     "neural-chat",
     "nexusraven",
@@ -130,36 +149,38 @@ MODEL_NAMES = [  # https://ollama.com/library
     "nous-hermes",
     "nous-hermes2",
     "nous-hermes2-mixtral",
-    "open-orca-platypus2",
+    "nuextract",
     "openchat",
     "openhermes",
-    "orca-mini",
+    "open-orca-platypus2",
     "orca2",
+    "orca-mini",
     "phi",
     "phi3",
     "phind-codellama",
     "qwen",
+    "qwen2",
     "samantha-mistral",
     "snowflake-arctic-embed",
     "solar",
     "sqlcoder",
     "stable-beluga",
     "stable-code",
-    "stablelm-zephyr",
     "stablelm2",
+    "stablelm-zephyr",
     "starcoder",
     "starcoder2",
     "starling-lm",
     "tinydolphin",
     "tinyllama",
     "vicuna",
+    "wizardcoder",
+    "wizardlm",
+    "wizardlm2",
+    "wizardlm-uncensored",
     "wizard-math",
     "wizard-vicuna",
     "wizard-vicuna-uncensored",
-    "wizardcoder",
-    "wizardlm",
-    "wizardlm-uncensored",
-    "wizardlm2",
     "xwinlm",
     "yarn-llama2",
     "yarn-mistral",
diff --git a/homeassistant/components/ollama/manifest.json b/homeassistant/components/ollama/manifest.json
index 7afaaa3dbd4..f7265d87aab 100644
--- a/homeassistant/components/ollama/manifest.json
+++ b/homeassistant/components/ollama/manifest.json
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ollama",
   "integration_type": "service",
   "iot_class": "local_polling",
-  "requirements": ["ollama-hass==0.1.7"]
+  "requirements": ["ollama==0.3.0"]
 }
diff --git a/homeassistant/components/ollama/models.py b/homeassistant/components/ollama/models.py
index ce0f858bb8c..56cc552fad1 100644
--- a/homeassistant/components/ollama/models.py
+++ b/homeassistant/components/ollama/models.py
@@ -29,7 +29,7 @@ class MessageHistory:
     @property
     def num_user_messages(self) -> int:
         """Return a count of user messages."""
-        return sum(m["role"] == MessageRole.USER for m in self.messages)
+        return sum(m["role"] == MessageRole.USER.value for m in self.messages)


 @dataclass(frozen=True)
diff --git a/requirements_all.txt b/requirements_all.txt
index 010f0ed32a0..09c147d4948 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -1463,7 +1463,7 @@ odp-amsterdam==6.0.2
 oemthermostat==1.1.1

 # homeassistant.components.ollama
-ollama-hass==0.1.7
+ollama==0.3.0

 # homeassistant.components.omnilogic
 omnilogic==0.4.5
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 54079d42273..c05868bbb7d 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -1199,7 +1199,7 @@ objgraph==3.5.0
 odp-amsterdam==6.0.2

 # homeassistant.components.ollama
-ollama-hass==0.1.7
+ollama==0.3.0

 # homeassistant.components.omnilogic
 omnilogic==0.4.5
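Reviewer note (not part of the diff): the models.py hunk changes the role comparison from the MessageRole.USER enum member to its .value, which is the right thing to do when the message dicts handled alongside the upstream ollama client (now ollama==0.3.0) carry plain string roles. The snippet below is a minimal, self-contained sketch of that pitfall; the MessageRole enum and the sample messages here are hypothetical stand-ins for illustration, not the integration's actual code.

    # Minimal sketch: a plain Enum member never compares equal to a bare
    # string, so counting roles against MessageRole.USER silently yields 0.
    # This MessageRole is a hypothetical stand-in, not the integration's enum.
    from enum import Enum

    class MessageRole(Enum):
        SYSTEM = "system"
        USER = "user"
        ASSISTANT = "assistant"

    messages = [
        {"role": "system", "content": "You are a voice assistant."},
        {"role": "user", "content": "Turn on the kitchen lights."},
    ]

    # Enum member vs. str: always False for a plain Enum, so this counts 0.
    assert sum(m["role"] == MessageRole.USER for m in messages) == 0

    # Comparing against .value matches the plain "user" strings: count is 1.
    assert sum(m["role"] == MessageRole.USER.value for m in messages) == 1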