Overview
Mem0 includes built-in support for various popular large language models. Memory can make use of the LLM provided by the user, ensuring efficient handling of their specific needs.
- OpenAI
- Groq
- Together
- AWS Bedrock
- Litellm
- Google AI
- Anthropic
- Mistral AI
- OpenAI Azure
OpenAI
import os
from mem0 import Memory

os.environ["OPENAI_API_KEY"] = "your-api-key"

# Configure mem0 to use OpenAI's gpt-4o as the LLM
config = {
    "llm": {
        "provider": "openai",
        "config": {
            "model": "gpt-4o",
            "temperature": 0.2,
            "max_tokens": 1500,
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
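Once memories have been stored, the same Memory instance can be used to read them back. A minimal sketch, assuming mem0's standard search and get_all methods; the query string below is illustrative:

# Search memories relevant to a query for this user (illustrative query)
related = m.search("What does alice do on weekends?", user_id="alice")

# List everything stored for this user
all_memories = m.get_all(user_id="alice")

The same add/retrieve pattern applies to every provider below; only the llm block of the config changes.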
Groq
import os
from mem0 import Memory

os.environ["GROQ_API_KEY"] = "your-api-key"

config = {
    "llm": {
        "provider": "groq",
        "config": {
            "model": "mixtral-8x7b-32768",
            "temperature": 0.1,
            "max_tokens": 1000,
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
Together
import os
from mem0 import Memory

os.environ["TOGETHER_API_KEY"] = "your-api-key"

config = {
    "llm": {
        "provider": "togetherai",
        "config": {
            "model": "mistralai/Mixtral-8x7B-Instruct-v0.1",
            "temperature": 0.2,
            "max_tokens": 1500,
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
AWS Bedrock
import os
from mem0 import Memory

os.environ["AWS_REGION"] = "us-east-1"
os.environ["AWS_ACCESS_KEY_ID"] = "xx"
os.environ["AWS_SECRET_ACCESS_KEY"] = "xx"

config = {
    "llm": {
        "provider": "aws_bedrock",
        "config": {
            "model": "arn:aws:bedrock:us-east-1:123456789012:model/your-model-name",
            "temperature": 0.2,
            "max_tokens": 1500,
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
Litellm
import os
from mem0 import Memory

os.environ["OPENAI_API_KEY"] = "your-api-key"

config = {
    "llm": {
        "provider": "litellm",
        "config": {
            "model": "gpt-3.5-turbo",
            "temperature": 0.2,
            "max_tokens": 1500,
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
Google AI
import os
from mem0 import Memory

os.environ["GEMINI_API_KEY"] = "your-api-key"

config = {
    "llm": {
        "provider": "litellm",
        "config": {
            "model": "gemini/gemini-pro",
            "temperature": 0.2,
            "max_tokens": 1500,
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
Anthropic
import os
from mem0 import Memory

os.environ["ANTHROPIC_API_KEY"] = "your-api-key"

config = {
    "llm": {
        "provider": "litellm",
        "config": {
            "model": "claude-3-opus-20240229",
            "temperature": 0.1,
            "max_tokens": 2000,
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
Mistral AI
import os
from mem0 import Memory

os.environ["MISTRAL_API_KEY"] = "your-api-key"

config = {
    "llm": {
        "provider": "litellm",
        "config": {
            "model": "open-mixtral-8x7b",
            "temperature": 0.1,
            "max_tokens": 2000,
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
OpenAI Azure
import os
from mem0 import Memory

os.environ["AZURE_API_KEY"] = "your-api-key"
# Needed to use custom models
os.environ["AZURE_API_BASE"] = "your-api-base-url"
os.environ["AZURE_API_VERSION"] = "version-to-use"

config = {
    "llm": {
        "provider": "litellm",
        "config": {
            "model": "azure_ai/command-r-plus",
            "temperature": 0.1,
            "max_tokens": 2000,
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})