Basic Usage
- Python SDK
- JavaScript SDK
Copy
from gravixlayer import GravixLayer

client = GravixLayer()

# Initialize memory with all required parameters.
# NOTE(review): semantics per the Configuration Parameters section below —
# embedding_model vectorizes text for search, inference_model extracts
# memories from conversations, index_name is the storage namespace, and
# cloud_provider/region select where the data is hosted.
memory = client.memory(
    embedding_model="baai/bge-large-en-v1.5",
    inference_model="google/gemma-3-12b-it",
    index_name="my_memories",
    cloud_provider="AWS",
    region="us-east-1"
)

# Add simple text; the response nests created entries under 'results'
result = memory.add("I love pizza", user_id="alice")
print(f"Added memory: {result['results'][0]['memory']}")
print(f"Memory ID: {result['results'][0]['id']}")

# Add with metadata (an arbitrary dict stored alongside the memory)
result = memory.add("User prefers dark mode", user_id="alice", metadata={"type": "preference"})
print(f"Added preference: {result['results'][0]['memory']}")
print(f"Metadata: {result['results'][0]['metadata']}")

# Get all memories to verify both writes landed under this user_id
all_memories = memory.get_all(user_id="alice")
print(f"\nTotal memories for alice: {len(all_memories['results'])}")
for i, mem in enumerate(all_memories['results'], 1):
    print(f"{i}. {mem['memory']}")
    # metadata is optional per entry, so guard before printing it
    if mem.get('metadata'):
        print(f" Metadata: {mem['metadata']}")
Copy
Added memory: I love pizza
Memory ID: b355d0d2-3eaa-4bc6-a61b-48ee615279bf
Added preference: User prefers dark mode
Metadata: {'type': 'preference'}
Total memories for alice: 2
1. I love pizza
2. User prefers dark mode
Metadata: {'type': 'preference'}
Copy
import { GravixLayer, Memory } from 'gravixlayer';

const client = new GravixLayer();

// Initialize memory with all required parameters.
// Positional arguments: client, embedding model, inference model,
// index name, cloud provider, region (mirrors the Python keyword
// arguments shown in the Python SDK tab).
const memory = new Memory(
  client,
  'baai/bge-large-en-v1.5',
  'google/gemma-3-12b-it',
  'my_memories',
  'AWS',
  'us-east-1'
);

async function addMemories() {
  // Add simple text; created entries come back under `results`
  const result1 = await memory.add("I love pizza", "alice");
  console.log(`Added memory: ${result1.results[0].memory}`);
  console.log(`Memory ID: ${result1.results[0].id}`);

  // Add with metadata — the third positional argument is an options object
  const result2 = await memory.add("User prefers dark mode", "alice", {
    metadata: {type: "preference"}
  });
  console.log(`Added preference: ${result2.results[0].memory}`);
  console.log(`Metadata: ${JSON.stringify(result2.results[0].metadata)}`);

  // Get all memories to verify both writes landed under this user id
  const allMemories = await memory.getAll("alice");
  console.log(`\nTotal memories for alice: ${allMemories.results.length}`);
  allMemories.results.forEach((mem, index) => {
    console.log(`${index + 1}. ${mem.memory}`);
    // metadata is optional per entry, so guard before printing it
    if (mem.metadata) {
      console.log(` Metadata: ${JSON.stringify(mem.metadata)}`);
    }
  });
}

// Surface any SDK/network failure instead of leaving the rejection unhandled
addMemories().catch(console.error);
Copy
Added memory: I love pizza
Memory ID: b355d0d2-3eaa-4bc6-a61b-48ee615279bf
Added preference: User prefers dark mode
Metadata: {"type":"preference"}
Total memories for alice: 2
1. I love pizza
2. User prefers dark mode
Metadata: {"type":"preference"}
Add Conversations
Store entire conversations and let the AI extract key memories:
- Python SDK
- JavaScript SDK
Copy
from gravixlayer import GravixLayer

client = GravixLayer()

# Initialize memory (same required parameters as in Basic Usage,
# pointed at a separate "conversations" index)
memory = client.memory(
    embedding_model="baai/bge-large-en-v1.5",
    inference_model="google/gemma-3-12b-it",
    index_name="conversations",
    cloud_provider="AWS",
    region="us-east-1"
)

# Store a conversation with AI inference.
# Messages use the familiar chat format: a list of role/content dicts.
conversation = [
    {"role": "user", "content": "I'm planning to watch a movie tonight. Any recommendations?"},
    {"role": "assistant", "content": "How about thriller movies? They can be quite engaging."},
    {"role": "user", "content": "I'm not a big fan of thriller movies but I love sci-fi movies."},
    {"role": "assistant", "content": "Got it! I'll avoid thriller recommendations and suggest sci-fi movies in the future."}
]
# infer=True lets the inference model distill the transcript into discrete
# memories (see expected output below) instead of storing it verbatim;
# the supplied metadata is attached to every extracted memory.
result = memory.add(conversation, user_id="alice", infer=True, metadata={"type": "conversation"})
print(f"AI extracted {len(result['results'])} memories from conversation:")
for i, extracted_memory in enumerate(result['results'], 1):
    print(f"{i}. {extracted_memory['memory']}")
    print(f" ID: {extracted_memory['id']}")
    if extracted_memory.get('metadata'):
        print(f" Metadata: {extracted_memory['metadata']}")

# Verify by searching for movie preferences — the query need not match
# the stored text word-for-word (vector search over the embeddings)
search_results = memory.search("movie preferences", user_id="alice")
print(f"\nFound {len(search_results['results'])} movie-related memories:")
for result in search_results['results']:
    print(f"- {result['memory']}")
Copy
AI extracted 2 memories from conversation:
1. User prefers sci-fi movies
ID: c455d0d2-3eaa-4bc6-a61b-48ee615279bf
Metadata: {'type': 'conversation'}
2. User dislikes thriller movies
ID: d755d0d2-3eaa-4bc6-a61b-48ee615279bf
Metadata: {'type': 'conversation'}
Found 2 movie-related memories:
- User prefers sci-fi movies
- User dislikes thriller movies
Copy
import { GravixLayer, Memory } from 'gravixlayer';

const client = new GravixLayer();

// Initialize memory (same required positional parameters as in Basic
// Usage, pointed at a separate 'conversations' index)
const memory = new Memory(
  client,
  'baai/bge-large-en-v1.5',
  'google/gemma-3-12b-it',
  'conversations',
  'AWS',
  'us-east-1'
);

async function addConversation() {
  // Store a conversation with AI inference.
  // Messages use the familiar chat format: an array of {role, content}.
  const conversation = [
    {role: "user", content: "I'm planning to watch a movie tonight. Any recommendations?"},
    {role: "assistant", content: "How about thriller movies? They can be quite engaging."},
    {role: "user", content: "I'm not a big fan of thriller movies but I love sci-fi movies."},
    {role: "assistant", content: "Got it! I'll avoid thriller recommendations and suggest sci-fi movies in the future."}
  ];
  // infer: true lets the inference model distill the transcript into
  // discrete memories instead of storing it verbatim; the supplied
  // metadata is attached to every extracted memory.
  const result = await memory.add(conversation, "alice", {
    infer: true,
    metadata: {type: "conversation"}
  });
  console.log(`AI extracted ${result.results.length} memories from conversation:`);
  result.results.forEach((extractedMemory, index) => {
    console.log(`${index + 1}. ${extractedMemory.memory}`);
    console.log(` ID: ${extractedMemory.id}`);
    if (extractedMemory.metadata) {
      console.log(` Metadata: ${JSON.stringify(extractedMemory.metadata)}`);
    }
  });

  // Verify by searching for movie preferences — the query need not match
  // the stored text word-for-word (vector search over the embeddings)
  const searchResults = await memory.search("movie preferences", "alice");
  console.log(`\nFound ${searchResults.results.length} movie-related memories:`);
  searchResults.results.forEach(result => {
    console.log(`- ${result.memory}`);
  });
}

// Surface any SDK/network failure instead of leaving the rejection unhandled
addConversation().catch(console.error);
Copy
AI extracted 2 memories from conversation:
1. User prefers sci-fi movies
ID: c455d0d2-3eaa-4bc6-a61b-48ee615279bf
Metadata: {"type":"conversation"}
2. User dislikes thriller movies
ID: d755d0d2-3eaa-4bc6-a61b-48ee615279bf
Metadata: {"type":"conversation"}
Found 2 movie-related memories:
- User prefers sci-fi movies
- User dislikes thriller movies
Multilingual Support
Initialize memory with a multilingual embedding model for multi-language support.

Configuration Parameters

All parameters are required unless marked optional:
- embedding_model — how text gets converted to searchable vectors (required)
- inference_model — AI model that extracts memories from conversations (required)
- index_name — where memories are stored (like folders) (required)
- cloud_provider — where your data is hosted (required)
- region — cloud region (required)
- delete_protection — protect the index from deletion (optional, default: False)
- Python SDK
- JavaScript SDK
Copy
from gravixlayer import GravixLayer

client = GravixLayer()

# Initialize with multilingual support: swapping the embedding model is
# all that is needed — the rest of the configuration is unchanged.
memory = client.memory(
    embedding_model="microsoft/multilingual-e5-large", # Supports 100+ languages
    inference_model="google/gemma-3-12b-it",
    index_name="user_preferences",
    cloud_provider="GCP",
    region="us-central1",
    delete_protection=False # Optional, defaults to False
)

# Now works with any language — the same add() call, no per-language setup
result1 = memory.add("El usuario prefiere pizza", user_id="alice")
result2 = memory.add("L'utilisateur aime le café", user_id="alice")
result3 = memory.add("用户喜欢寿司", user_id="alice")
print("Added multilingual memories:")
print(f"Spanish: {result1['results'][0]['memory']}")
print(f"French: {result2['results'][0]['memory']}")
print(f"Chinese: {result3['results'][0]['memory']}")

# Check current configuration (echoes back the models and index in use)
config = memory.get_current_configuration()
print(f"\nCurrent configuration:")
print(f"Embedding model: {config['embedding_model']}")
print(f"Inference model: {config['inference_model']}")
print(f"Index name: {config['index_name']}")

# Search works across all languages: an English query matches the
# Spanish, French, and Chinese memories (see expected output below)
search_results = memory.search("food preferences", user_id="alice")
print(f"\nFound {len(search_results['results'])} food-related memories:")
for result in search_results['results']:
    print(f"- {result['memory']}")
Copy
Added multilingual memories:
Spanish: El usuario prefiere pizza
French: L'utilisateur aime le café
Chinese: 用户喜欢寿司
Current configuration:
Embedding model: microsoft/multilingual-e5-large
Inference model: google/gemma-3-12b-it
Index name: user_preferences
Found 3 food-related memories:
- El usuario prefiere pizza
- L'utilisateur aime le café
- 用户喜欢寿司
Copy
import { GravixLayer, Memory } from 'gravixlayer';

const client = new GravixLayer();

// Initialize with multilingual support: swapping the embedding model is
// all that is needed — the rest of the configuration is unchanged.
const memory = new Memory(
  client,
  'microsoft/multilingual-e5-large', // Supports 100+ languages
  'google/gemma-3-12b-it',
  'user_preferences',
  'GCP',
  'us-central1',
  false // deleteProtection - optional, defaults to false
);

async function setupMultilingualMemory() {
  // Now works with any language — the same add() call, no per-language setup
  const result1 = await memory.add("El usuario prefiere pizza", "alice");
  const result2 = await memory.add("L'utilisateur aime le café", "alice");
  const result3 = await memory.add("用户喜欢寿司", "alice");
  console.log("Added multilingual memories:");
  console.log(`Spanish: ${result1.results[0].memory}`);
  console.log(`French: ${result2.results[0].memory}`);
  console.log(`Chinese: ${result3.results[0].memory}`);

  // Check current configuration (echoes back the models and index in use;
  // note the snake_case keys match the Python SDK's response shape)
  const config = memory.getCurrentConfiguration();
  console.log(`\nCurrent configuration:`);
  console.log(`Embedding model: ${config.embedding_model}`);
  console.log(`Inference model: ${config.inference_model}`);
  console.log(`Index name: ${config.index_name}`);

  // Search works across all languages: an English query matches the
  // Spanish, French, and Chinese memories (see expected output below)
  const searchResults = await memory.search("food preferences", "alice");
  console.log(`\nFound ${searchResults.results.length} food-related memories:`);
  searchResults.results.forEach(result => {
    console.log(`- ${result.memory}`);
  });
}

// Surface any SDK/network failure instead of leaving the rejection unhandled
setupMultilingualMemory().catch(console.error);
Copy
Added multilingual memories:
Spanish: El usuario prefiere pizza
French: L'utilisateur aime le café
Chinese: 用户喜欢寿司
Current configuration:
Embedding model: microsoft/multilingual-e5-large
Inference model: google/gemma-3-12b-it
Index name: user_preferences
Found 3 food-related memories:
- El usuario prefiere pizza
- L'utilisateur aime le café
- 用户喜欢寿司

