Skip to main content

Next.js Integration

Build AI-powered Next.js applications with streaming, server components, and API routes.

Quick Start

App Router

// app/api/chat/route.ts
import { Agent, createRouteHandler } from 'praisonai-ts';

// One shared agent instance serves every request to this route.
const chatAgent = new Agent({
  name: 'ChatAgent',
  instructions: 'You are a helpful assistant.',
});

// Expose the agent as a Next.js App Router POST handler.
export const POST = createRouteHandler({ agent: chatAgent });

Client Component

// app/chat/page.tsx
'use client';

import { useState } from 'react';

/**
 * Minimal chat page: POSTs the user's message to /api/chat and appends
 * both the question and the agent's reply to the transcript.
 */
export default function ChatPage() {
  const [messages, setMessages] = useState<string[]>([]);
  const [input, setInput] = useState('');
  const [loading, setLoading] = useState(false);

  const sendMessage = async () => {
    const message = input.trim();
    if (!message || loading) return; // ignore empty sends and double-clicks

    setLoading(true);
    try {
      const response = await fetch('/api/chat', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ message }),
      });
      if (!response.ok) {
        throw new Error(`Request failed with status ${response.status}`);
      }

      const data = await response.json();
      // Functional update — the previous `[...messages, ...]` form read a
      // stale closure and could drop messages when sends overlapped.
      setMessages(prev => [...prev, message, data.response]);
      setInput('');
    } catch (err: unknown) {
      const detail = err instanceof Error ? err.message : String(err);
      setMessages(prev => [...prev, message, `Error: ${detail}`]);
    } finally {
      setLoading(false);
    }
  };

  return (
    <div>
      {messages.map((msg, i) => (
        <p key={i}>{msg}</p>
      ))}
      <input value={input} onChange={(e) => setInput(e.target.value)} />
      <button onClick={sendMessage} disabled={loading}>
        {loading ? 'Thinking…' : 'Send'}
      </button>
    </div>
  );
}

Streaming Responses

API Route with Streaming

// app/api/chat/route.ts
import { Agent, createRouteHandler } from 'praisonai-ts';

// Agent shared across requests; the handler streams its output.
const streamingAgent = new Agent({
  name: 'StreamingAgent',
  instructions: 'You are a helpful assistant.',
});

// `streaming: true` makes the route emit incremental chunks
// instead of a single JSON body.
export const POST = createRouteHandler({ agent: streamingAgent, streaming: true });

Client with Streaming

'use client';

import { useState } from 'react';

/**
 * Streams the agent's reply from /api/chat (SSE-style `data:` lines)
 * and renders the accumulated text as it arrives.
 */
export default function StreamingChat() {
  const [response, setResponse] = useState('');

  const sendMessage = async (message: string) => {
    setResponse('');

    const res = await fetch('/api/chat', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ message }),
    });
    if (!res.ok || !res.body) {
      setResponse(`Error: request failed (${res.status})`);
      return;
    }

    const reader = res.body.getReader();
    const decoder = new TextDecoder();
    // SSE events can be split across network chunks; buffer the trailing
    // partial line so JSON.parse never sees half an event.
    let buffer = '';

    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      // `stream: true` keeps multi-byte UTF-8 sequences that straddle
      // chunk boundaries from being decoded as replacement characters.
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split('\n');
      buffer = lines.pop() ?? ''; // carry the incomplete line forward

      for (const line of lines) {
        if (!line.startsWith('data: ')) continue;
        try {
          const data = JSON.parse(line.slice(6));
          if (data.type === 'text') {
            setResponse(prev => prev + data.content);
          }
        } catch {
          // Skip malformed or keep-alive events instead of aborting the stream.
        }
      }
    }
  };

  return (
    <div>
      <p>{response}</p>
      <button onClick={() => sendMessage('Hello!')}>Send</button>
    </div>
  );
}

Pages Router

// pages/api/chat.ts
import type { NextApiRequest, NextApiResponse } from 'next';
import { Agent, createPagesHandler } from 'praisonai-ts';

// Created once at module load and reused for every request.
const pagesAgent = new Agent({
  name: 'PagesAgent',
  instructions: 'You are a helpful assistant.',
});

// Pages Router API route: createPagesHandler adapts the agent to the
// (req: NextApiRequest, res: NextApiResponse) handler signature.
export default createPagesHandler({ agent: pagesAgent });

Server Components

// app/page.tsx
import { Agent } from 'praisonai-ts';

// Module-level agent: constructed once per server instance.
const serverAgent = new Agent({
  name: 'ServerAgent',
  instructions: 'You are a helpful assistant.',
});

/**
 * Async Server Component — the chat call runs on the server during
 * rendering, so no API route or client-side JavaScript is needed.
 */
export default async function Page() {
  const answer = await serverAgent.chat('What is PraisonAI?');

  return (
    <div>
      <h1>AI Response</h1>
      <p>{answer}</p>
    </div>
  );
}

With Tools

// app/api/chat/route.ts
import { Agent, createRouteHandler } from 'praisonai-ts';

/**
 * Evaluates a basic arithmetic expression without `eval`.
 *
 * SECURITY: tool arguments come from the model (and transitively from user
 * input), so the expression is whitelisted to digits, the four operators,
 * parentheses, decimal points and whitespace before being compiled.
 * Anything else — identifiers, property access, template strings — is rejected.
 */
function safeCalculate(expression: string): number {
  if (!/^[\d+\-*/().\s]+$/.test(expression)) {
    throw new Error('Invalid expression: only basic arithmetic is allowed');
  }
  // `Function` has no access to local scope; with the character whitelist
  // above the expression cannot reference any runtime object.
  const value = Function(`"use strict"; return (${expression});`)() as number;
  if (typeof value !== 'number' || !Number.isFinite(value)) {
    throw new Error('Expression did not evaluate to a finite number');
  }
  return value;
}

const agent = new Agent({
  name: 'ToolAgent',
  instructions: 'You help with calculations and searches.',
  tools: [
    {
      name: 'calculate',
      description: 'Perform math calculations',
      // Previously called eval() on model-supplied input — an arbitrary
      // code execution hole. Now routed through the sanitized helper.
      execute: async ({ expression }: { expression: string }) => {
        return { result: safeCalculate(expression) };
      },
    },
  ],
});

export const POST = createRouteHandler({
  agent,
  streaming: true,
});

UIMessage Protocol

For compatibility with AI SDK’s useChat:
// app/api/chat/route.ts
import { Agent, toUIMessageStreamResponse } from 'praisonai-ts';

const agent = new Agent({
  name: 'UIAgent',
  instructions: 'You are a helpful assistant.',
});

/**
 * UIMessage-protocol endpoint compatible with the AI SDK's `useChat`.
 * Streams the agent's reply to the most recent message in the thread.
 */
export async function POST(req: Request) {
  const { messages } = await req.json();

  // Guard before indexing: `messages[messages.length - 1].content` would
  // throw on an empty array or a non-array payload.
  const last = Array.isArray(messages) ? messages.at(-1) : undefined;
  if (!last?.content) {
    return new Response('No message provided', { status: 400 });
  }

  const stream = await agent.streamChat(last.content);

  return toUIMessageStreamResponse(stream);
}

Environment Variables

OPENAI_API_KEY=sk-...

Configuration

// Full createRouteHandler configuration surface.
export const POST = createRouteHandler({
  agent,
  
  // Streaming: emit the reply incrementally instead of one JSON body.
  streaming: true,
  
  // CORS (for external access): allow cross-origin calls only from the
  // listed origins, restricted to the listed HTTP methods.
  cors: {
    origin: ['https://example.com'],
    methods: ['POST'],
  },
  
  // Rate limiting: at most `max` requests per `windowMs` milliseconds.
  rateLimit: {
    windowMs: 60000,
    max: 100,
  },
});

Best Practices

  1. Use streaming - Better UX for long responses
  2. Handle errors - Show user-friendly error messages
  3. Add loading states - Indicate when AI is thinking
  4. Secure API routes - Add authentication
  5. Cache responses - For repeated queries