Home Assistant Unofficial Reference
2024.12.1
const.py
Go to the documentation of this file.
1
"""Constants for the Ollama integration."""

DOMAIN = "ollama"

# Config-entry / options keys.
CONF_MODEL = "model"
CONF_PROMPT = "prompt"

CONF_KEEP_ALIVE = "keep_alive"
# seconds. -1 = indefinite, 0 = never
DEFAULT_KEEP_ALIVE = -1

KEEP_ALIVE_FOREVER = -1
# seconds
DEFAULT_TIMEOUT = 5.0

# Context-window size (num_ctx) bounds passed to the Ollama server.
CONF_NUM_CTX = "num_ctx"
DEFAULT_NUM_CTX = 8192
MIN_NUM_CTX = 2048
MAX_NUM_CTX = 131072

# Maximum number of conversation turns kept in history.
CONF_MAX_HISTORY = "max_history"
DEFAULT_MAX_HISTORY = 20

# 1 hour
MAX_HISTORY_SECONDS = 60 * 60
24
# Known model names, used for config-flow suggestions.
# https://ollama.com/library
# NOTE: the extracted text was missing the comma between "granite3-guardian"
# and "granite3-moe", which would have silently fused them into one entry via
# implicit string concatenation — restored here as two separate models.
MODEL_NAMES = [
    "alfred",
    "all-minilm",
    "aya-expanse",
    "aya",
    "bakllava",
    "bespoke-minicheck",
    "bge-large",
    "bge-m3",
    "codebooga",
    "codegeex4",
    "codegemma",
    "codellama",
    "codeqwen",
    "codestral",
    "codeup",
    "command-r-plus",
    "command-r",
    "dbrx",
    "deepseek-coder-v2",
    "deepseek-coder",
    "deepseek-llm",
    "deepseek-v2.5",
    "deepseek-v2",
    "dolphin-llama3",
    "dolphin-mistral",
    "dolphin-mixtral",
    "dolphin-phi",
    "dolphincoder",
    "duckdb-nsql",
    "everythinglm",
    "falcon",
    "falcon2",
    "firefunction-v2",
    "gemma",
    "gemma2",
    "glm4",
    "goliath",
    "granite-code",
    "granite3-dense",
    "granite3-guardian",
    "granite3-moe",
    "hermes3",
    "internlm2",
    "llama-guard3",
    "llama-pro",
    "llama2-chinese",
    "llama2-uncensored",
    "llama2",
    "llama3-chatqa",
    "llama3-gradient",
    "llama3-groq-tool-use",
    "llama3.1",
    "llama3.2",
    "llama3",
    "llava-llama3",
    "llava-phi3",
    "llava",
    "magicoder",
    "mathstral",
    "meditron",
    "medllama2",
    "megadolphin",
    "minicpm-v",
    "mistral-large",
    "mistral-nemo",
    "mistral-openorca",
    "mistral-small",
    "mistral",
    "mistrallite",
    "mixtral",
    "moondream",
    "mxbai-embed-large",
    "nemotron-mini",
    "nemotron",
    "neural-chat",
    "nexusraven",
    "nomic-embed-text",
    "notus",
    "notux",
    "nous-hermes",
    "nous-hermes2-mixtral",
    "nous-hermes2",
    "nuextract",
    "open-orca-platypus2",
    "openchat",
    "openhermes",
    "orca-mini",
    "orca2",
    "paraphrase-multilingual",
    "phi",
    "phi3.5",
    "phi3",
    "phind-codellama",
    "qwen",
    "qwen2-math",
    "qwen2.5-coder",
    "qwen2.5",
    "qwen2",
    "reader-lm",
    "reflection",
    "samantha-mistral",
    "shieldgemma",
    "smollm",
    "smollm2",
    "snowflake-arctic-embed",
    "solar-pro",
    "solar",
    "sqlcoder",
    "stable-beluga",
    "stable-code",
    "stablelm-zephyr",
    "stablelm2",
    "starcoder",
    "starcoder2",
    "starling-lm",
    "tinydolphin",
    "tinyllama",
    "vicuna",
    "wizard-math",
    "wizard-vicuna-uncensored",
    "wizard-vicuna",
    "wizardcoder",
    "wizardlm-uncensored",
    "wizardlm",
    "wizardlm2",
    "xwinlm",
    "yarn-llama2",
    "yarn-mistral",
    "yi-coder",
    "yi",
    "zephyr",
]
# Model pre-selected in the config flow; includes the explicit ":latest" tag.
DEFAULT_MODEL = "llama3.2:latest"
core
homeassistant
components
ollama
const.py
Generated by
1.9.1