Skip to main content
This section demonstrates adding simple memories (plain text or conversation messages) using the default memory configuration — no custom index or model required.
from gravixlayer import GravixLayer

client = GravixLayer()
memory = client.memory

# Store a plain-text memory for user "alice" and echo what was written.
added = memory.add("I love pizza", user_id="alice")
first = added['results'][0]
print(f"Added memory: {first['memory']}")
print(f"Memory ID: {first['id']}")

# Store a second memory, this time tagged with caller-supplied metadata.
added = memory.add("User prefers dark mode", user_id="alice", metadata={"type": "preference"})
first = added['results'][0]
print(f"Added preference: {first['memory']}")
print(f"Metadata: {first['metadata']}")

# Fetch everything stored for alice to confirm both writes landed.
stored = memory.get_all(user_id="alice")
print(f"\nTotal memories for alice: {len(stored['results'])}")
for idx, entry in enumerate(stored['results'], 1):
    print(f"{idx}. {entry['memory']}")
    if entry.get('metadata'):
        print(f"   Metadata: {entry['metadata']}")
Expected Output (abridged — the first run also emits one-time index-creation logs):
Memory index 'gravixlayer_memories' not found
Embedding model: baai/bge-large-en-v1.5
Dimension: 1024
Cloud config: {'cloud_provider': 'AWS', 'region': 'us-east-1', 'index_type': 'serverless'}
Creating memory index...
Successfully created memory index: 1dc9e3a7-ffba-46b6-ad65-7aa3c55685e3
I love pizza
Extracted 2 memories from conversation

Add Memory with Custom Configuration

Custom Configuration: When you specify custom parameters, they override the defaults for that memory instance.
from gravixlayer import GravixLayer

client = GravixLayer()
memory = client.memory

# Store a conversation with AI inference. With infer=True the service
# extracts distinct facts/preferences from the messages rather than
# storing the raw transcript verbatim.
conversation = [
    {"role": "user", "content": "I'm planning to watch a movie tonight. Any recommendations?"},
    {"role": "assistant", "content": "How about thriller movies? They can be quite engaging."},
    {"role": "user", "content": "I'm not a big fan of thriller movies but I love sci-fi movies."},
    {"role": "assistant", "content": "Got it! I'll avoid thriller recommendations and suggest sci-fi movies in the future."}
]

result = memory.add(conversation, user_id="alice", infer=True, metadata={"type": "conversation"})
print(f"AI extracted {len(result['results'])} memories from conversation:")

for i, extracted_memory in enumerate(result['results'], 1):
    print(f"{i}. {extracted_memory['memory']}")
    print(f"   ID: {extracted_memory['id']}")
    if extracted_memory.get('metadata'):
        print(f"   Metadata: {extracted_memory['metadata']}")

# Verify by searching for movie preferences.
# FIX: use a distinct loop variable instead of reusing `result`, which
# previously clobbered the memory.add response bound above.
search_results = memory.search("movie preferences", user_id="alice")
print(f"\nFound {len(search_results['results'])} movie-related memories:")
for hit in search_results['results']:
    print(f"- {hit['memory']}")