MemGPT/letta/local_llm/ollama/settings.py
Sarah Wooders 85faf5f474
chore: migrate package name to letta (#1775)
Co-authored-by: Charles Packer <packercharles@gmail.com>
Co-authored-by: Shubham Naik <shubham.naik10@gmail.com>
Co-authored-by: Shubham Naik <shub@memgpt.ai>
2024-09-23 09:15:18 -07:00


# see https://github.com/jmorganca/ollama/blob/main/docs/api.md
# and https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values
SIMPLE = {
    "options": {
        "stop": [
            "\nUSER:",
            "\nASSISTANT:",
            "\nFUNCTION RETURN:",
            "\nUSER",
            "\nASSISTANT",
            "\nFUNCTION RETURN",
            "\nFUNCTION",
            "\nFUNC",
            "<|im_start|>",
            "<|im_end|>",
            "<|im_sep|>",
            # '\n' +
            # '</s>',
            # '<|',
            # '\n#',
            # '\n\n\n',
        ],
        # "num_ctx": LLM_MAX_TOKENS,
    },
    "stream": False,
    # turn off Ollama's own prompt formatting
    "system": "",
    "template": "{{ .Prompt }}",
    # "system": None,
    # "template": None,
    "context": None,
}
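
# For context, a minimal usage sketch (not part of this module) of how a
# settings dict like SIMPLE could be merged into a request to Ollama's
# /api/generate endpoint, per the API docs linked above. The helper name,
# model name, and base URL below are illustrative assumptions.
#
# import requests
#
# def generate(prompt: str, model: str = "llama2",
#              base_url: str = "http://localhost:11434") -> str:
#     payload = dict(SIMPLE)  # shared options: stop tokens, stream, template, etc.
#     payload.update({"model": model, "prompt": prompt})
#     resp = requests.post(f"{base_url}/api/generate", json=payload)
#     resp.raise_for_status()
#     # With "stream": False the endpoint returns a single JSON object whose
#     # "response" field holds the completion text.
#     return resp.json()["response"]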