PostgreSQL Persistence
PostgreSQL provides enterprise-grade SQL database persistence with advanced features like JSONB storage, full-text search, and horizontal scaling for production applications.

Quick Start

Step 1: Basic Setup

from praisonaiagents import Agent, db

agent = Agent(
    name="ProductionBot",
    instructions="You are a helpful assistant.",
    db=db(database_url="postgresql://username:password@localhost:5432/praisonai"),
    session_id="prod-session"
)

response = agent.chat("Hello! This is saved to PostgreSQL.")
print(response)  # Conversation persisted to PostgreSQL
Step 2: With Connection Pooling

from praisonaiagents import Agent, db

# Production configuration with connection pooling
agent = Agent(
    name="ScalableBot",
    instructions="You are a helpful assistant.",
    db=db(database_url="postgresql://user:pass@localhost:5432/mydb?pool_size=10"),
    session_id="scalable-session"
)

agent.chat("This uses connection pooling for better performance")

How It Works

PostgreSQL stores data in optimized tables with advanced features:
| Table         | Features                              | Benefits                        |
|---------------|---------------------------------------|---------------------------------|
| conversations | JSONB metadata, indexes               | Fast metadata queries           |
| messages      | Full-text search, timestamps          | Searchable conversation history |
| runs          | Execution tracking, performance stats | Agent monitoring                |
| tool_calls    | JSON tool parameters                  | Rich tool usage analytics       |

Configuration Options

Database URL Formats

# Basic connection
db(database_url="postgresql://user:password@localhost:5432/database")

# With SSL (production recommended)
db(database_url="postgresql://user:pass@host:5432/db?sslmode=require")

# Connection pooling
db(database_url="postgresql://user:pass@host:5432/db?pool_size=20&max_overflow=5")

# Read-replica setup: writes go to the primary (below); point read-only
# clients at the replica host instead (see High Availability Setup)
db(database_url="postgresql://user:pass@primary:5432/db")

Advanced PostgreSQL Configuration

from praisonaiagents import Agent, db

# Custom PostgreSQL database with specific options
postgres_db = db.PostgresDB(
    host="localhost",
    port=5432,
    user="praisonai_user", 
    password="secure_password",
    database="praisonai_production",
    # Connection pool settings
    pool_size=10,
    max_overflow=20,
    # SSL settings for production
    sslmode="require"
)

agent = Agent(
    name="EnterpriseBot",
    instructions="You are a production assistant.",
    db=postgres_db,
    session_id="enterprise-session"
)

Docker Setup

Quick PostgreSQL setup with Docker:
# Start PostgreSQL container
docker run -d \
    --name praisonai-postgres \
    -e POSTGRES_DB=praisonai \
    -e POSTGRES_USER=praisonai_user \
    -e POSTGRES_PASSWORD=your_password \
    -p 5432:5432 \
    postgres:15

# Connect and verify
docker exec -it praisonai-postgres psql -U praisonai_user -d praisonai
Then use in your agent:
from praisonaiagents import Agent, db

agent = Agent(
    name="DockerBot",
    db=db(database_url="postgresql://praisonai_user:your_password@localhost:5432/praisonai"),
    session_id="docker-session"
)

Advanced Queries and Analytics

PostgreSQL enables powerful analytics on conversation data:
import psycopg2
from praisonaiagents import Agent, db

# Create agent and have conversations
agent = Agent(
    name="AnalyticsBot",
    db=db(database_url="postgresql://user:pass@localhost:5432/analytics"),
    session_id="analytics-session"
)

# Have some conversations
agent.chat("Help me analyze customer feedback")
agent.chat("What are the trending topics this month?")

# Direct database access for analytics
conn = psycopg2.connect(
    host="localhost",
    database="analytics", 
    user="user",
    password="pass"
)
cursor = conn.cursor()

# Advanced analytics queries
cursor.execute("""
    SELECT 
        DATE_TRUNC('hour', created_at) as hour,
        COUNT(*) as message_count,
        COUNT(DISTINCT session_id) as unique_sessions
    FROM messages 
    WHERE created_at > NOW() - INTERVAL '24 hours'
    GROUP BY hour
    ORDER BY hour;
""")

analytics = cursor.fetchall()
for hour, msg_count, sessions in analytics:
    print(f"{hour}: {msg_count} messages, {sessions} sessions")

# Simple pattern search across conversations (ILIKE is case-insensitive
# pattern matching; for true full-text search use tsvector/tsquery with a GIN index)
cursor.execute("""
    SELECT session_id, content, created_at
    FROM messages
    WHERE content ILIKE %s
    ORDER BY created_at DESC
    LIMIT 10;
""", ("%customer feedback%",))

results = cursor.fetchall()
conn.close()

High Availability Setup

For production deployments with high availability:
import os
from praisonaiagents import Agent, db

# Primary database with read replica
primary_db_url = os.getenv("PRIMARY_DATABASE_URL")
replica_db_url = os.getenv("REPLICA_DATABASE_URL")

# Write to primary, read from replica when possible
agent = Agent(
    name="HABot",
    db=db(
        database_url=primary_db_url,
        # Some implementations support read replicas
        read_replica_url=replica_db_url
    ),
    session_id="ha-session"
)

Migration from SQLite

Migrate existing SQLite data to PostgreSQL:
import sqlite3
import psycopg2
from praisonaiagents import db

# Export from SQLite
sqlite_conn = sqlite3.connect("old_conversations.db") 
sqlite_cursor = sqlite_conn.cursor()

sqlite_cursor.execute("SELECT session_id, role, content, created_at FROM messages")
messages = sqlite_cursor.fetchall()

# Import to PostgreSQL  
postgres_conn = psycopg2.connect("postgresql://user:pass@localhost:5432/new_db")
postgres_cursor = postgres_conn.cursor()

for session_id, role, content, created_at in messages:
    postgres_cursor.execute("""
        INSERT INTO messages (session_id, role, content, created_at)
        VALUES (%s, %s, %s, %s)
    """, (session_id, role, content, created_at))

postgres_conn.commit()
postgres_conn.close()
sqlite_conn.close()

print("Migration completed!")

Best Practices

  • Use connection pooling in production (pool_size=10-20)
  • Configure appropriate timeouts for your use case
  • Monitor connection pool metrics
  • Use persistent connections for long-running applications
  • Create indexes on frequently queried columns (session_id, created_at)
  • Use EXPLAIN ANALYZE to optimize slow queries
  • Consider partitioning large tables by date
  • Use read replicas for analytics workloads
  • Always use SSL connections in production (sslmode=require)
  • Use environment variables for credentials
  • Implement database user permissions (read-only for analytics)
  • Regular security updates and patches
  • Set up automated daily backups with pg_dump
  • Test backup restoration procedures regularly
  • Use point-in-time recovery for critical applications
  • Consider cross-region backup replication

MySQL Persistence

Alternative SQL database option with different strengths

Database Persistence Overview

Compare all available persistence backends