MySQL Persistence
MySQL provides reliable SQL database persistence with excellent tooling, widespread ecosystem support, and proven performance for web applications and enterprise deployments.

Quick Start

1. Basic Setup

# Minimal example: persist agent conversations in MySQL via a database URL.
from praisonaiagents import Agent, db

agent = Agent(
    name="MySQLBot",
    instructions="You are a helpful assistant.",
    # URL format: mysql://user:password@host:port/database
    db=db(database_url="mysql://username:password@localhost:3306/praisonai"),
    session_id="mysql-session"  # groups messages into one stored conversation
)

response = agent.chat("Hello! This conversation is stored in MySQL.")
print(response)  # Conversation persisted to MySQL
2. With SSL Connection

from praisonaiagents import Agent, db

# Production setup with SSL.
# ssl_ca must point to the CA certificate file on the machine running the
# agent; it is used to verify the MySQL server's certificate.
agent = Agent(
    name="SecureBot",
    instructions="You are a helpful assistant.",
    db=db(database_url="mysql://user:pass@hostname:3306/database?ssl_ca=/path/to/ca.pem"),
    session_id="secure-session"
)

agent.chat("This connection uses SSL encryption")

How It Works

MySQL stores conversation data in optimized InnoDB tables:
| Table         | Engine | Features                               |
|---------------|--------|----------------------------------------|
| conversations | InnoDB | ACID transactions, foreign keys        |
| messages      | InnoDB | Full-text indexes, UTF8MB4 support     |
| runs          | InnoDB | Execution tracking, performance stats  |
| tool_calls    | InnoDB | JSON column type (MySQL 5.7+)          |

Configuration Options

Database URL Formats

# Basic connection
db(database_url="mysql://user:password@localhost:3306/database")

# With specific charset and timezone.
# utf8mb4 is required for full Unicode support (including emoji).
db(database_url="mysql://user:pass@host:3306/db?charset=utf8mb4&time_zone=UTC")

# Connection pooling.
# pool_recycle=3600 recycles idle connections hourly, avoiding errors when
# the server closes them after wait_timeout.
db(database_url="mysql://user:pass@host:3306/db?pool_size=20&pool_recycle=3600")

# SSL connection: verify the server certificate against the given CA file.
db(database_url="mysql://user:pass@host:3306/db?ssl_ca=/path/to/ca.pem&ssl_verify_cert=true")

Advanced MySQL Configuration

from praisonaiagents import Agent, db

# Custom MySQL database with specific options, for when a plain URL is not
# expressive enough (explicit pool sizing, SSL verification, charset).
mysql_db = db.MySQLDB(
    host="mysql.example.com",
    port=3306,
    user="praisonai_user",
    password="secure_password",
    database="praisonai_production",
    # MySQL-specific options
    charset="utf8mb4",  # full Unicode, including emoji
    autocommit=True,
    # Connection pool settings
    pool_size=15,
    pool_recycle=3600,  # Recycle connections every hour
    # SSL settings
    ssl_ca="/path/to/ca-cert.pem",
    ssl_verify_cert=True  # reject servers whose cert does not match the CA
)

agent = Agent(
    name="ProductionBot",
    instructions="You are a production assistant.",
    db=mysql_db,
    session_id="production-session"
)

Docker Setup

Quick MySQL setup with Docker:
# Start MySQL container
docker run -d \
    --name praisonai-mysql \
    -e MYSQL_ROOT_PASSWORD=root_password \
    -e MYSQL_DATABASE=praisonai \
    -e MYSQL_USER=praisonai_user \
    -e MYSQL_PASSWORD=user_password \
    -p 3306:3306 \
    mysql:8.0

# Actually wait for MySQL to accept connections — the server takes several
# seconds to initialize on first run, so poll until it answers a ping.
until docker exec praisonai-mysql mysqladmin ping -h localhost --silent; do
    sleep 1
done

# Verify access interactively (prompts for the user password)
docker exec -it praisonai-mysql mysql -u praisonai_user -p praisonai
Then use in your agent:
from praisonaiagents import Agent, db

# Credentials must match the MYSQL_USER / MYSQL_PASSWORD / MYSQL_DATABASE
# values passed to the Docker container.
agent = Agent(
    name="DockerBot",
    db=db(database_url="mysql://praisonai_user:user_password@localhost:3306/praisonai"),
    session_id="docker-session"
)

Advanced Features

JSON Data Storage (MySQL 5.7+)

import mysql.connector
from praisonaiagents import Agent, db

# Create agent with MySQL backend
agent = Agent(
    name="JSONBot",
    db=db(database_url="mysql://user:pass@localhost:3306/jsondb"),
    session_id="json-session"
)

# Have conversations with metadata
agent.chat("Store this with metadata: {'priority': 'high', 'category': 'urgent'}")

# Direct JSON queries against the backing store
conn = mysql.connector.connect(
    host="localhost",
    database="jsondb",
    user="user",
    password="pass"
)
try:
    cursor = conn.cursor()

    # Query JSON metadata (MySQL 5.7+ feature).
    # The value is passed as a query parameter (%s), never interpolated into
    # the SQL string — safe even when the search value comes from user input.
    cursor.execute(
        """
        SELECT session_id, content, metadata
        FROM messages
        WHERE JSON_EXTRACT(metadata, '$.priority') = %s
        """,
        ("high",),
    )

    for session_id, content, metadata in cursor.fetchall():
        print(f"Urgent: {content}")
finally:
    # Close the connection even if a query above raises.
    conn.close()
# Full-Text Search (requires a FULLTEXT index on messages.content)
import mysql.connector
from praisonaiagents import Agent, db

agent = Agent(
    name="SearchBot",
    db=db(database_url="mysql://user:pass@localhost:3306/searchdb"),
    session_id="search-session"
)

# Create some searchable conversations
agent.chat("I love machine learning and artificial intelligence")
agent.chat("Python is great for data science projects")
agent.chat("Natural language processing is fascinating")

# Direct database search
conn = mysql.connector.connect(
    host="localhost",
    database="searchdb",
    user="user",
    password="pass"
)
try:
    cursor = conn.cursor()

    # Full-text search; the search term is a bound parameter, not a literal,
    # so arbitrary user-supplied terms cannot break the query.
    cursor.execute(
        """
        SELECT content, created_at
        FROM messages
        WHERE MATCH(content) AGAINST(%s IN NATURAL LANGUAGE MODE)
        ORDER BY created_at DESC
        """,
        ("machine learning",),
    )

    for content, created_at in cursor.fetchall():
        print(f"Found: {content}")
finally:
    # Release the connection even when the query fails.
    conn.close()

Replication Setup

MySQL master-slave replication for high availability:
import mysql.connector
from praisonaiagents import Agent, db

# Writes go to the master; heavy analytics reads go straight to a replica.
master_url = "mysql://user:pass@mysql-master:3306/praisonai"
slave_url = "mysql://user:pass@mysql-slave:3306/praisonai"  # for replica-aware backends

agent = Agent(
    name="ReplicatedBot",
    db=db(
        database_url=master_url,
        # Some implementations may support read replicas
    ),
    session_id="replicated-session"
)

# For read-heavy analytics, connect directly to the replica with a
# least-privilege read-only account.
slave_conn = mysql.connector.connect(
    host="mysql-slave",
    database="praisonai",
    user="readonly_user",
    password="readonly_pass"
)
try:
    cursor = slave_conn.cursor()
    # COUNT(DISTINCT ...) replaces the original derived-table subquery:
    # same result, simpler plan.
    cursor.execute("SELECT COUNT(DISTINCT session_id) FROM messages")
    total = cursor.fetchone()[0]
    print(f"Total conversations: {total}")
finally:
    # The original example never closed this connection — leak fixed.
    slave_conn.close()

Migration and Backup

Data Export and Import

import mysql.connector
import json
from praisonaiagents import Agent, db

# Export conversations to JSON
def export_conversations(output_path='conversations_backup.json'):
    """Dump every stored message to a JSON file, ordered by creation time.

    Args:
        output_path: Destination file; the default keeps the original
            example's filename, so existing callers behave identically.

    The database connection is always closed, even if a query raises.
    """
    conn = mysql.connector.connect(
        host="localhost",
        database="praisonai",
        user="user",
        password="pass"
    )
    try:
        cursor = conn.cursor()
        cursor.execute("""
            SELECT session_id, role, content, created_at
            FROM messages
            ORDER BY created_at
        """)

        conversations = [
            {
                'session_id': session_id,
                'role': role,
                'content': content,
                'timestamp': created_at.isoformat(),
            }
            for session_id, role, content, created_at in cursor.fetchall()
        ]
    finally:
        conn.close()

    # ensure_ascii=False keeps non-ASCII message text human-readable;
    # utf-8 encoding makes the file portable across platforms.
    with open(output_path, 'w', encoding='utf-8') as f:
        json.dump(conversations, f, indent=2, ensure_ascii=False)

    print(f"Conversations exported to {output_path}")

# Run export
export_conversations()

Database Backup Script

#!/bin/bash
# backup_mysql.sh — dump the praisonai database, compress it, report the result.
set -euo pipefail  # abort on any failed command or unset variable

# Configuration
DB_HOST="localhost"
DB_NAME="praisonai"
DB_USER="backup_user"
DB_PASS="backup_password"
BACKUP_DIR="/backups/mysql"
DATE=$(date +%Y%m%d_%H%M%S)

# Create backup directory
mkdir -p "$BACKUP_DIR"

# Create backup. --single-transaction gives a consistent InnoDB snapshot
# without locking tables. All expansions are quoted so paths or passwords
# containing spaces cannot break the command.
mysqldump --host="$DB_HOST" --user="$DB_USER" --password="$DB_PASS" \
    --single-transaction --routines --triggers \
    "$DB_NAME" > "$BACKUP_DIR/praisonai_backup_$DATE.sql"

# Compress backup
gzip "$BACKUP_DIR/praisonai_backup_$DATE.sql"

echo "Backup completed: praisonai_backup_$DATE.sql.gz"

Best Practices

  • Use InnoDB engine for ACID compliance and performance
  • Create indexes on frequently queried columns (session_id, created_at)
  • Configure appropriate innodb_buffer_pool_size (50-80% of available RAM)
  • Use connection pooling to manage concurrent connections
  • Use utf8mb4 character set for full Unicode support (including emojis)
  • Set utf8mb4_unicode_ci collation for proper sorting
  • Configure connection charset to match database charset
  • Test with international characters and emojis
  • Use SSL connections for production deployments
  • Create dedicated database users with minimal required privileges
  • Regularly update MySQL to latest stable version
  • Monitor failed login attempts and unusual query patterns
  • Monitor slow query log for optimization opportunities
  • Set up automated backups with point-in-time recovery
  • Use MySQL Enterprise Monitor or Percona Monitoring for detailed metrics
  • Plan for periodic maintenance windows for updates

PostgreSQL Persistence

Alternative SQL database with advanced features

Database Persistence Overview

Compare all available persistence backends