Tool Configuration
This page documents how to configure tools in PraisonAI, covering timeout settings, performance optimization, error handling, and resource management.
Tool Timeout Configuration
Basic Timeout Settings
```python
from praisonaiagents import Agent, Tool

# Configure tool with timeout
tool = Tool(
    name="web_scraper",
    description="Scrape web pages",
    function=scrape_webpage,
    config={
        "timeout": 30,                 # seconds
        "retry_on_timeout": True,
        "timeout_message": "Web scraping timed out"
    }
)

agent = Agent(
    name="Researcher",
    tools=[tool],
    tool_config={
        "global_timeout": 60,          # Global timeout for all tools
        "timeout_strategy": "graceful" # or "immediate"
    }
)
```
Advanced Timeout Configuration
```python
timeout_config = {
    # Per-tool timeout settings
    "tool_timeouts": {
        "web_search": {
            "timeout": 30,
            "connect_timeout": 5,
            "read_timeout": 25,
            "retry_on_timeout": True,
            "max_retries": 2
        },
        "database_query": {
            "timeout": 120,
            "statement_timeout": 60,   # SQL statement timeout
            "lock_timeout": 10,        # Lock acquisition timeout
            "idle_timeout": 300        # Connection idle timeout
        },
        "api_call": {
            "timeout": 45,
            "dynamic_timeout": {
                "base": 30,
                "per_item": 0.5,       # Additional time per item
                "max": 120
            }
        }
    },

    # Global timeout settings
    "global_config": {
        "default_timeout": 60,
        "max_total_time": 300,         # Maximum time for all tool executions
        "parallel_timeout": 120,       # Timeout for parallel executions
        "timeout_buffer": 5            # Buffer time for cleanup
    }
}
```
Dynamic Timeout Calculation
```python
def calculate_dynamic_timeout(tool_name, input_data):
    """Calculate timeout based on input complexity"""
    base_timeouts = {
        "data_processor": 30,
        "ml_model": 60,
        "web_scraper": 45
    }
    base = base_timeouts.get(tool_name, 60)

    # Adjust based on input size
    if hasattr(input_data, '__len__'):
        size_factor = len(input_data) / 100
        return min(base + (size_factor * 10), 300)
    return base

# Configure dynamic timeouts
dynamic_timeout_config = {
    "timeout_calculator": calculate_dynamic_timeout,
    "enable_dynamic": True,
    "cache_calculations": True
}
```
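For example, the calculator above scales the timeout with the size of the input and caps it at 300 seconds, falling back to the base value for inputs without a length:

```python
# 500 items -> 30s base + (500 / 100) * 10s = 80s
print(calculate_dynamic_timeout("data_processor", list(range(500))))  # 80.0

# Inputs without __len__ fall back to the base timeout
print(calculate_dynamic_timeout("ml_model", 42))  # 60
```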
Performance Tuning
Resource Management
```python
performance_config = {
    # CPU and memory limits
    "resource_limits": {
        "max_cpu_percent": 80,
        "max_memory_mb": 2048,
        "max_threads": 10,
        "max_processes": 4
    },

    # Execution optimization
    "execution": {
        "parallel_tools": True,
        "max_parallel": 5,
        "queue_size": 100,
        "priority_queue": True,
        "batch_processing": True,
        "batch_size": 10
    },

    # Caching configuration
    "cache": {
        "enabled": True,
        "cache_size": 1000,
        "ttl": 3600,
        "cache_strategy": "lru",
        "serialize_method": "pickle"
    }
}
```
Tool Execution Strategies
# Sequential execution (default)
sequential_config = {
"execution_strategy": "sequential",
"fail_fast": True,
"continue_on_error": False
}
# Parallel execution
parallel_config = {
"execution_strategy": "parallel",
"max_workers": 5,
"thread_pool": True, # Use threads instead of processes
"chunk_size": 10,
"ordered_results": True
}
# Async execution
async_config = {
"execution_strategy": "async",
"event_loop": "asyncio",
"max_concurrent": 10,
"semaphore_limit": 5,
"gather_errors": True
}
# Pipeline execution
pipeline_config = {
"execution_strategy": "pipeline",
"stages": [
{"name": "fetch", "parallel": True},
{"name": "process", "parallel": False},
{"name": "store", "parallel": True}
],
"buffer_size": 100,
"backpressure": True
}
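To picture what the async strategy implies at runtime, here is a minimal standalone sketch (plain asyncio, not a PraisonAI API) of running tool calls concurrently under a semaphore, with errors collected rather than raised, mirroring `semaphore_limit` and `gather_errors`:

```python
import asyncio

async def run_tools_async(tool_calls, semaphore_limit=5, gather_errors=True):
    """Run awaitable tool calls concurrently, at most `semaphore_limit` at a time."""
    semaphore = asyncio.Semaphore(semaphore_limit)

    async def bounded(call):
        async with semaphore:
            return await call()

    # return_exceptions=True collects failures alongside successful results
    return await asyncio.gather(
        *(bounded(call) for call in tool_calls),
        return_exceptions=gather_errors,
    )

async def main():
    async def fake_tool(i):
        await asyncio.sleep(0.1)  # placeholder work
        return f"result-{i}"

    results = await run_tools_async([lambda i=i: fake_tool(i) for i in range(10)])
    print(results)

asyncio.run(main())
```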
Performance Monitoring
```python
monitoring_config = {
    "metrics": {
        "track_execution_time": True,
        "track_memory_usage": True,
        "track_cpu_usage": True,
        "track_io_operations": True
    },
    "profiling": {
        "enabled": False,            # Enable for debugging
        "profile_type": "cProfile",
        "output_format": "stats",
        "top_functions": 20
    },
    "alerts": {
        "slow_execution": {
            "threshold": 30,         # seconds
            "action": "log_warning"
        },
        "high_memory": {
            "threshold": 1024,       # MB
            "action": "garbage_collect"
        },
        "repeated_timeouts": {
            "threshold": 3,
            "window": 300,           # 5 minutes
            "action": "circuit_break"
        }
    }
}
```
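As a rough illustration of the `slow_execution` alert, a tool function can be wrapped so that executions over the threshold are logged as warnings. The decorator below is a hypothetical sketch, not part of PraisonAI:

```python
import functools
import logging
import time

logger = logging.getLogger("tool-metrics")

def log_slow_executions(threshold_seconds=30):
    """Log a warning whenever the wrapped function runs longer than the threshold."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            start = time.monotonic()
            try:
                return func(*args, **kwargs)
            finally:
                elapsed = time.monotonic() - start
                if elapsed > threshold_seconds:
                    logger.warning("%s took %.1fs (threshold %ss)",
                                   func.__name__, elapsed, threshold_seconds)
        return wrapper
    return decorator

@log_slow_executions(threshold_seconds=30)
def slow_tool(query):
    time.sleep(0.1)  # placeholder work
    return f"results for {query}"
```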
Tool-Specific Configurations
Web Scraping Tools
```python
web_scraping_config = {
    "timeout": 45,
    "page_load_timeout": 30,
    "script_timeout": 20,
    "implicit_wait": 10,
    "retry_config": {
        "max_retries": 3,
        "retry_on": ["timeout", "connection_error"],
        "backoff_factor": 2
    },
    "performance": {
        "headless": True,
        "disable_images": True,
        "disable_javascript": False,
        "use_cache": True,
        "connection_pool_size": 10
    },
    "rate_limiting": {
        "requests_per_second": 2,
        "burst_size": 5,
        "respect_robots_txt": True
    }
}
```
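If you want to see how a `requests_per_second` limit with a burst allowance behaves, here is a minimal token-bucket sketch. The class is illustrative only and not a PraisonAI API:

```python
import time

class TokenBucket:
    """Allow `rate` requests per second, with bursts up to `burst_size`."""
    def __init__(self, rate=2.0, burst_size=5):
        self.rate = rate
        self.capacity = burst_size
        self.tokens = float(burst_size)
        self.updated = time.monotonic()

    def acquire(self):
        """Block until a token is available, then consume it."""
        while True:
            now = time.monotonic()
            self.tokens = min(self.capacity, self.tokens + (now - self.updated) * self.rate)
            self.updated = now
            if self.tokens >= 1:
                self.tokens -= 1
                return
            time.sleep((1 - self.tokens) / self.rate)

limiter = TokenBucket(rate=2, burst_size=5)
for url in ["https://example.com/a", "https://example.com/b"]:
    limiter.acquire()  # waits if the bucket is empty
    print("fetching", url)
```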
Database Tools
```python
database_config = {
    "timeout": 60,
    "connection_timeout": 10,
    "statement_timeout": 50,
    "pool_config": {
        "min_connections": 2,
        "max_connections": 20,
        "connection_lifetime": 3600,
        "idle_timeout": 600,
        "retry_on_connection_failure": True
    },
    "query_optimization": {
        "use_prepared_statements": True,
        "batch_size": 1000,
        "fetch_size": 100,
        "use_compression": True
    },
    "transaction_config": {
        "isolation_level": "READ_COMMITTED",
        "auto_commit": False,
        "lock_timeout": 5
    }
}
```
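For reference, comparable limits can be applied directly at the driver level. The sketch below uses PostgreSQL with psycopg2 purely as an assumed example (PraisonAI does not mandate a specific driver); the pool sizes and timeouts mirror the config above:

```python
from psycopg2.pool import SimpleConnectionPool

# min/max connections mirror pool_config; connect_timeout is in seconds,
# statement_timeout and lock_timeout are sent to the server in milliseconds.
pool = SimpleConnectionPool(
    2, 20,
    dsn="dbname=app user=app host=localhost",
    connect_timeout=10,
    options="-c statement_timeout=50000 -c lock_timeout=5000",
)

conn = pool.getconn()
try:
    with conn.cursor() as cur:
        cur.execute("SELECT 1")
        print(cur.fetchone())
finally:
    pool.putconn(conn)
```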
API Tools
```python
api_tool_config = {
    "timeout": 30,
    "connect_timeout": 5,
    "read_timeout": 25,
    "retry_config": {
        "max_retries": 3,
        "retry_on_status": [429, 500, 502, 503],
        "exponential_backoff": True,
        "respect_retry_after": True
    },
    "rate_limiting": {
        "calls_per_minute": 60,
        "calls_per_hour": 1000,
        "burst_allowance": 10,
        "queue_excess": True
    },
    "circuit_breaker": {
        "enabled": True,
        "failure_threshold": 5,
        "timeout": 60,
        "half_open_calls": 2
    }
}
```
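The circuit-breaker settings follow the usual pattern: open after `failure_threshold` consecutive failures, wait `timeout` seconds, then allow a limited number of half-open trial calls. A minimal standalone sketch of that pattern (not a PraisonAI class) looks like this:

```python
import time

class CircuitBreaker:
    """Open after `failure_threshold` consecutive failures; retry after `timeout` seconds."""
    def __init__(self, failure_threshold=5, timeout=60, half_open_calls=2):
        self.failure_threshold = failure_threshold
        self.timeout = timeout
        self.half_open_calls = half_open_calls
        self.failures = 0
        self.opened_at = None
        self.trial_calls = 0

    def call(self, func, *args, **kwargs):
        if self.opened_at is not None:
            if time.monotonic() - self.opened_at < self.timeout:
                raise RuntimeError("circuit open: skipping call")
            if self.trial_calls >= self.half_open_calls:
                raise RuntimeError("circuit half-open: trial budget exhausted")
            self.trial_calls += 1  # half-open: allow a few trial calls
        try:
            result = func(*args, **kwargs)
        except Exception:
            self.failures += 1
            if self.failures >= self.failure_threshold:
                self.opened_at = time.monotonic()  # re-open the circuit
                self.trial_calls = 0
            raise
        else:
            # Any success closes the circuit again
            self.failures = 0
            self.opened_at = None
            return result
```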
File Processing Tools
```python
file_processing_config = {
    "timeout": 120,
    "chunk_size": 8192,                  # bytes
    "file_limits": {
        "max_file_size": 104857600,      # 100MB
        "allowed_extensions": [".txt", ".csv", ".json", ".xml"],
        "scan_for_malware": True
    },
    "processing": {
        "use_memory_mapping": True,
        "parallel_processing": True,
        "compression": "auto",           # auto-detect
        "encoding": "utf-8",
        "error_handling": "replace"
    },
    "optimization": {
        "buffer_size": 65536,
        "read_ahead": True,
        "use_native_operations": True
    }
}
```
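To make the size and extension limits concrete, a file can be validated and streamed in `chunk_size` pieces along these lines. The helper is illustrative, not part of PraisonAI:

```python
import os

def read_in_chunks(path, chunk_size=8192, max_file_size=104857600,
                   allowed_extensions=(".txt", ".csv", ".json", ".xml")):
    """Yield the file in chunks after checking extension and size limits."""
    if os.path.splitext(path)[1].lower() not in allowed_extensions:
        raise ValueError(f"Extension not allowed: {path}")
    if os.path.getsize(path) > max_file_size:
        raise ValueError(f"File exceeds {max_file_size} bytes: {path}")
    with open(path, "r", encoding="utf-8", errors="replace") as handle:
        while True:
            chunk = handle.read(chunk_size)
            if not chunk:
                break
            yield chunk

# Usage: process a large CSV without loading it all into memory
# for chunk in read_in_chunks("data.csv"):
#     handle_chunk(chunk)
```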
Error Handling Configuration
```python
error_handling_config = {
    "global_error_handler": "graceful",  # or "strict", "logging"
    "error_strategies": {
        "TimeoutError": {
            "retry": True,
            "max_retries": 2,
            "increase_timeout": 1.5,
            "fallback_tool": "simple_fetcher"
        },
        "RateLimitError": {
            "retry": True,
            "use_exponential_backoff": True,
            "switch_endpoint": True
        },
        "ValidationError": {
            "retry": False,
            "log_level": "ERROR",
            "user_message": "Invalid input provided"
        }
    },
    "recovery_strategies": {
        "checkpoint_enabled": True,
        "save_partial_results": True,
        "resume_on_restart": True
    }
}
```
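The `TimeoutError` strategy above, retrying with the timeout multiplied by `increase_timeout` on each attempt, can be pictured with this standalone sketch (not the PraisonAI error handler; it assumes the called function accepts a `timeout` keyword):

```python
def call_with_escalating_timeout(func, *, timeout=30, max_retries=2, increase_timeout=1.5):
    """Retry on TimeoutError, multiplying the timeout by `increase_timeout` each attempt."""
    attempt = 0
    while True:
        try:
            return func(timeout=timeout)
        except TimeoutError:
            attempt += 1
            if attempt > max_retries:
                raise
            timeout *= increase_timeout  # e.g. 30s -> 45s -> 67.5s

# Usage with any function that accepts a `timeout` keyword:
# result = call_with_escalating_timeout(fetch_page, timeout=30)
```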
Resource Pooling
```python
pooling_config = {
    "connection_pools": {
        "http": {
            "pool_size": 20,
            "max_overflow": 10,
            "timeout": 30,
            "recycle": 3600,
            "pre_ping": True
        },
        "database": {
            "pool_size": 10,
            "max_overflow": 5,
            "timeout": 30,
            "recycle": 1800
        }
    },
    "thread_pools": {
        "default": {
            "min_threads": 2,
            "max_threads": 10,
            "queue_size": 100,
            "thread_name_prefix": "tool-"
        }
    },
    "resource_sharing": {
        "share_connections": True,
        "share_sessions": True,
        "cleanup_interval": 300
    }
}
```
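For the HTTP pool, a roughly equivalent effect with the `requests` library (assumed here purely for illustration) is a shared `Session` backed by a sized connection pool:

```python
import requests
from requests.adapters import HTTPAdapter

# One shared session ~ "share_sessions": True
session = requests.Session()
adapter = HTTPAdapter(
    pool_connections=20,   # ~ "pool_size"
    pool_maxsize=30,       # pool_size + max_overflow
    max_retries=3,
)
session.mount("https://", adapter)
session.mount("http://", adapter)

response = session.get("https://example.com", timeout=(5, 25))  # (connect, read)
print(response.status_code)
```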
Complete Tool Configuration Example
```python
from praisonaiagents import Agent, Tool

# Configure a web search tool with comprehensive settings
web_search_tool = Tool(
    name="advanced_web_search",
    description="Advanced web search with optimizations",
    function=perform_web_search,
    config={
        # Timeout settings
        "timeout": 30,
        "connect_timeout": 5,
        "read_timeout": 25,

        # Performance settings
        "cache_enabled": True,
        "cache_ttl": 3600,
        "parallel_requests": 5,
        "batch_size": 10,

        # Error handling
        "retry_on_timeout": True,
        "max_retries": 3,
        "exponential_backoff": True,

        # Rate limiting
        "rate_limit": 60,              # per minute
        "burst_size": 10,

        # Resource limits
        "max_memory_mb": 512,
        "max_response_size": 10485760  # 10MB
    }
)

# Create agent with tool configuration
agent = Agent(
    name="SearchAgent",
    tools=[web_search_tool],
    tool_config={
        # Global timeout settings
        "global_timeout": 300,
        "timeout_strategy": "graceful",

        # Performance settings
        "parallel_execution": True,
        "max_parallel_tools": 3,
        "cache_tool_results": True,

        # Monitoring
        "track_metrics": True,
        "log_slow_executions": True,
        "slow_threshold": 10           # seconds
    }
)
```
Environment Variables
```bash
# Tool timeout settings
export PRAISONAI_TOOL_TIMEOUT="60"
export PRAISONAI_TOOL_CONNECT_TIMEOUT="5"
export PRAISONAI_TOOL_READ_TIMEOUT="55"

# Performance settings
export PRAISONAI_TOOL_PARALLEL="true"
export PRAISONAI_TOOL_MAX_WORKERS="5"
export PRAISONAI_TOOL_CACHE_ENABLED="true"
export PRAISONAI_TOOL_CACHE_SIZE="1000"

# Resource limits
export PRAISONAI_TOOL_MAX_MEMORY="2048"  # MB
export PRAISONAI_TOOL_MAX_CPU="80"       # Percent

# Error handling
export PRAISONAI_TOOL_MAX_RETRIES="3"
export PRAISONAI_TOOL_RETRY_DELAY="2"

# Monitoring
export PRAISONAI_TOOL_METRICS="true"
export PRAISONAI_TOOL_PROFILE="false"
```
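If you need the same values inside your own code, they can be read back with `os.getenv` and converted to the expected types. The helpers below are your own code, not something PraisonAI exposes:

```python
import os

def env_int(name, default):
    """Read an integer environment variable, falling back to a default."""
    return int(os.getenv(name, default))

def env_bool(name, default="false"):
    """Treat 'true'/'1'/'yes' (case-insensitive) as True."""
    return os.getenv(name, default).strip().lower() in ("true", "1", "yes")

tool_timeout = env_int("PRAISONAI_TOOL_TIMEOUT", 60)
cache_enabled = env_bool("PRAISONAI_TOOL_CACHE_ENABLED")
print(tool_timeout, cache_enabled)
```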
Best Practices
Timeout Guidelines
- Set appropriate timeouts based on tool type:
  - Fast operations: 5-15 seconds
  - API calls: 15-30 seconds
  - Web scraping: 30-60 seconds
  - Data processing: 60-300 seconds
- Use dynamic timeouts for variable workloads:

```python
timeout = base_timeout + (item_count * per_item_timeout)
```

- Always set connection timeouts lower than read timeouts:

```python
connect_timeout = 5   # Quick failure if can't connect
read_timeout = 25     # More time for the actual operation
```
Performance Optimization
- Enable caching for idempotent operations (see the sketch after this list)
- Use connection pooling for repeated operations
- Implement circuit breakers for unreliable services
- Monitor and alert on performance degradation
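As a concrete illustration of the caching recommendation, an idempotent tool call can be memoized with a small TTL cache. The decorator below is illustrative only; PraisonAI's own caching is configured through the `cache` settings shown earlier:

```python
import functools
import time

def ttl_cache(ttl_seconds=3600, max_entries=1000):
    """Memoize results for `ttl_seconds`, evicting the oldest entry when full."""
    def decorator(func):
        cache = {}

        @functools.wraps(func)
        def wrapper(*args):
            now = time.monotonic()
            if args in cache:
                value, stored_at = cache[args]
                if now - stored_at < ttl_seconds:
                    return value
            value = func(*args)
            if len(cache) >= max_entries:
                cache.pop(next(iter(cache)))  # drop the oldest insertion
            cache[args] = value, now
            return value
        return wrapper
    return decorator

@ttl_cache(ttl_seconds=3600)
def search(query):
    return f"results for {query}"  # placeholder for an expensive call
```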
See Also
- Tool Development - Creating custom tools
- Agent Configuration - Agent-level tool settings
- Best Practices - Configuration guidelines

