Learn how to use advanced search parameters for memory retrieval including reranking and relevance filtering.
Memory search in PraisonAI Agents provides advanced parameters for better control over search results, including reranking for improved relevance and cutoff thresholds for quality control.
from praisonaiagents import Memory

# Initialize memory with ChromaDB (local storage)
memory = Memory(config={
    "provider": "rag",
    "use_embedding": True,
    "rag_db_path": ".praison/memory_db"
})

# Store information
memory.store_long_term("Paris is the capital of France")
memory.store_long_term("Tokyo is the capital of Japan")
memory.store_long_term("Berlin is the capital of Germany")

# Search with relevance cutoff
results = memory.search_long_term(
    "What is the capital of France?",
    relevance_cutoff=0.7,  # Only return results above 70% relevance
    limit=5
)

for result in results:
    print(f"Memory: {result['memory']}")
    print(f"Relevance: {result.get('score', 'N/A')}")
    print("-" * 50)
ChromaDB is the default local storage provider and supports relevance filtering:
from praisonaiagents import Memory

# Initialize ChromaDB memory
memory = Memory(config={
    "provider": "rag",
    "use_embedding": True,
    "rag_db_path": ".praison/memory_db"
})

# Store memories with metadata
memory.store_long_term(
    "The Eiffel Tower is 330 meters tall",
    metadata={"category": "landmarks", "city": "Paris"}
)
memory.store_long_term(
    "The Statue of Liberty is 93 meters tall",
    metadata={"category": "landmarks", "city": "New York"}
)

# Search with relevance cutoff
results = memory.search_long_term(
    "How tall is the Eiffel Tower?",
    relevance_cutoff=0.6,  # Filter out low-relevance results
    limit=10
)

# ChromaDB calculates score as: 1.0 - distance
# Higher scores mean better relevance
from praisonaiagents import Memory

# Create a knowledge base
knowledge_memory = Memory(config={
    "provider": "rag",
    "use_embedding": True
})

# Store various facts
facts = [
    "Python was created by Guido van Rossum in 1991",
    "JavaScript was created by Brendan Eich in 1995",
    "Java was created by James Gosling in 1995",
    "C++ was created by Bjarne Stroustrup in 1985",
    "Ruby was created by Yukihiro Matsumoto in 1995"
]
for fact in facts:
    knowledge_memory.store_long_term(fact)

# Search with different relevance thresholds
query = "Who created Python?"

# High relevance - only direct matches
strict_results = knowledge_memory.search_long_term(
    query,
    relevance_cutoff=0.8,
    limit=3
)
print(f"Strict search found {len(strict_results)} results")

# Medium relevance - related programming languages
related_results = knowledge_memory.search_long_term(
    query,
    relevance_cutoff=0.5,
    limit=5
)
print(f"Related search found {len(related_results)} results")
from praisonaiagents import Memory
import os

# Setup both providers
local_memory = Memory(config={
    "provider": "rag",
    "use_embedding": True
})
cloud_memory = Memory(config={
    "provider": "mem0",
    "config": {
        "api_key": os.getenv("MEM0_API_KEY")
    }
})


# Function to search both providers
def search_all_memory(query: str, use_rerank: bool = True):
    """Search local (ChromaDB) and cloud (Mem0) memory, merge, dedupe, and rank.

    Returns up to the top 10 results sorted by descending relevance score.
    """
    # Search local memory with relevance cutoff
    local_results = local_memory.search_long_term(
        query,
        relevance_cutoff=0.6,
        limit=5
    )

    # Search cloud memory with reranking
    cloud_results = cloud_memory.search(
        query=query,
        agent_id="global",
        rerank=use_rerank,  # Only works with Mem0
        limit=5
    )

    # Combine and deduplicate results by their 'memory' text
    all_results = []
    seen_content = set()
    for result in local_results + cloud_results:
        content = result.get('memory', '')
        if content not in seen_content:
            seen_content.add(content)
            all_results.append(result)

    # Sort by relevance score (missing scores sort last)
    all_results.sort(
        key=lambda x: x.get('score', 0),
        reverse=True
    )

    return all_results[:10]  # Top 10 results


# Use the multi-provider search
results = search_all_memory(
    "What are the main features of our product?",
    use_rerank=True
)

for i, result in enumerate(results, 1):
    print(f"{i}. {result['memory']}")
    print(f" Score: {result.get('score', 'N/A')}")
    print(f" Provider: {result.get('provider', 'unknown')}")