CLI Chatbot
An interactive terminal chatbot with conversation memory and persistent storage.
Features
- Streaming - Real-time response display
- History - Multi-turn conversation context
- Storage - Persist conversations across sessions
- Error Handling - Graceful failure recovery
Quick Start
bash
export OPENAI_API_KEY=your_key
npm run recipe:cli-chatbot

How It Works
This recipe creates an interactive terminal chatbot using Node.js readline, with conversation persistence via Meloqui's FileStorage.
Commands:
- Type any message to chat
- exit - Quit the application
- clear - Reset conversation history
Flow:
- User types a message in the terminal
- Message is sent to ChatClient with conversation history
- ChatClient streams the response from OpenAI
- Response tokens display in real-time
- Conversation is saved to disk for future sessions
Architecture
Commands
| Command | Description |
|---|---|
| exit | Quit the chatbot |
| clear | Clear conversation history |
| history | View conversation history |
| help | Show available commands |
Code Walkthrough
Setup
typescript
/**
* CLI Chatbot Library
*
* Exported functions for the CLI chatbot recipe.
* Snippet markers allow VitePress to extract code for documentation.
*/
import {
ChatClient,
FileStorage,
ChatError,
RateLimitError,
AuthenticationError
} from '../../../src';
// [start:config]
// Recipe-wide constants: on-disk history directory, the key under which
// this session's conversation is stored, and the OpenAI model to use.
export const STORAGE_DIR = './.chat-history';
export const CONVERSATION_ID = 'cli-session';
export const MODEL = 'gpt-4o-mini';
// [end:config]
// [start:colors]
// ANSI escape sequences used to style terminal output (reset restores defaults).
export const colors = {
reset: '\x1b[0m',
bright: '\x1b[1m',
dim: '\x1b[2m',
cyan: '\x1b[36m',
green: '\x1b[32m',
yellow: '\x1b[33m',
red: '\x1b[31m',
gray: '\x1b[90m'
};
// [end:colors]
// [start:create-client]
/**
 * Build a ChatClient wired to file-backed persistence, so the session's
 * turns are saved under CONVERSATION_ID inside STORAGE_DIR.
 *
 * @param apiKey - OpenAI API key used to authenticate requests.
 * @returns A configured ChatClient for this CLI session.
 */
export function createChatClient(apiKey: string): ChatClient {
  return new ChatClient({
    provider: 'openai',
    model: MODEL,
    apiKey,
    storage: new FileStorage(STORAGE_DIR),
    conversationId: CONVERSATION_ID
  });
}
// [end:create-client]
// [start:streaming]
/**
 * Consume the client's token stream for one user message, forwarding each
 * chunk's text to the caller-supplied sink as it arrives.
 *
 * @param client - The chat client to stream from.
 * @param message - The user's message to send.
 * @param onChunk - Called once per streamed chunk with its text content.
 */
export async function streamResponse(
  client: ChatClient,
  message: string,
  onChunk: (content: string) => void
): Promise<void> {
  const stream = client.stream(message);
  for await (const { content } of stream) {
    onChunk(content);
  }
}
// [end:streaming]
// [start:error-handling]
/**
 * Translate a thrown value into a user-friendly message.
 *
 * Guard clauses run from most to least specific; the instanceof order
 * matters if the library error classes subclass one another.
 *
 * @param error - Whatever was thrown (typed unknown per strict catch rules).
 * @returns A message suitable for printing to the terminal.
 */
export function handleError(error: unknown): string {
  if (error instanceof AuthenticationError) {
    return 'Authentication failed. Check your API key.';
  }
  if (error instanceof RateLimitError) {
    return 'Rate limited. Please wait a moment and try again.';
  }
  if (error instanceof ChatError) {
    return `Error: ${error.message}`;
  }
  if (error instanceof Error) {
    return `Error: ${error.message}`;
  }
  return 'An unexpected error occurred';
}
// [end:error-handling]
/**
 * Render the stored conversation as "You:"/"AI:" lines for display.
 * Each message is clipped to 100 characters; non-string content is
 * serialized as JSON before clipping.
 *
 * @param client - Client whose persisted history should be rendered.
 * @returns The formatted transcript, or null when no history exists yet.
 */
export async function formatHistory(client: ChatClient): Promise<string | null> {
  const history = await client.getHistory();
  if (history.length === 0) {
    return null;
  }
  return history
    .map((msg) => {
      const speaker = msg.role === 'user' ? 'You' : 'AI';
      const raw = typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content);
      const clipped = raw.length > 100 ? raw.substring(0, 100) + '...' : raw;
      return `${speaker}: ${clipped}`;
    })
    .join('\n');
}
/**
 * Print the startup banner with the basic command hints.
 * The box is drawn with Unicode characters and styled via `colors`;
 * the template literal's spacing is part of the rendered output.
 */
export function printBanner(): void {
console.log(`
${colors.cyan}╭─────────────────────────────────────────────╮
│ ${colors.bright}Meloqui CLI Chat${colors.reset}${colors.cyan} │
│ Type 'exit' to quit, 'clear' to reset │
│ Type 'history' to view conversation │
╰─────────────────────────────────────────────╯${colors.reset}
`);
}
export function printHelp(): void {
console.log(`
${colors.yellow}Commands:${colors.reset}
${colors.bright}exit${colors.reset} - Quit the chatbot
${colors.bright}clear${colors.reset} - Clear conversation history
${colors.bright}history${colors.reset} - Show conversation history
${colors.bright}help${colors.reset} - Show this help message
`);
}

The FileStorage persists conversations to disk, allowing the chatbot to remember context across restarts.
Streaming Responses
typescript
/**
 * CLI Chatbot Library
 *
 * Exported functions for the CLI chatbot recipe.
 * Snippet markers allow VitePress to extract code for documentation.
 */
import {
ChatClient,
FileStorage,
ChatError,
RateLimitError,
AuthenticationError
} from '../../../src';
// [start:config]
// Recipe-wide constants: on-disk history directory, the key under which
// this session's conversation is stored, and the OpenAI model to use.
export const STORAGE_DIR = './.chat-history';
export const CONVERSATION_ID = 'cli-session';
export const MODEL = 'gpt-4o-mini';
// [end:config]
// [start:colors]
// ANSI escape sequences used to style terminal output.
export const colors = {
reset: '\x1b[0m',
bright: '\x1b[1m',
dim: '\x1b[2m',
cyan: '\x1b[36m',
green: '\x1b[32m',
yellow: '\x1b[33m',
red: '\x1b[31m',
gray: '\x1b[90m'
};
// [end:colors]
// [start:create-client]
/**
 * Create a ChatClient that persists this session's conversation to disk
 * via FileStorage, so context survives across restarts.
 *
 * @param apiKey - OpenAI API key used to authenticate requests.
 */
export function createChatClient(apiKey: string): ChatClient {
const storage = new FileStorage(STORAGE_DIR);
return new ChatClient({
provider: 'openai',
model: MODEL,
apiKey,
storage,
conversationId: CONVERSATION_ID
});
}
// [end:create-client]
// [start:streaming]
/**
 * Stream the model's reply to `message`, invoking `onChunk` with each
 * chunk's text as it arrives so the caller can render in real time.
 */
export async function streamResponse(
client: ChatClient,
message: string,
onChunk: (content: string) => void
): Promise<void> {
for await (const chunk of client.stream(message)) {
onChunk(chunk.content);
}
}
// [end:streaming]
// [start:error-handling]
/**
 * Translate a thrown value into a user-friendly message.
 * The most specific library error classes are checked first; the order of
 * the instanceof checks matters if these errors subclass one another.
 */
export function handleError(error: unknown): string {
if (error instanceof AuthenticationError) {
return 'Authentication failed. Check your API key.';
} else if (error instanceof RateLimitError) {
return 'Rate limited. Please wait a moment and try again.';
} else if (error instanceof ChatError) {
return `Error: ${error.message}`;
} else if (error instanceof Error) {
return `Error: ${error.message}`;
} else {
return 'An unexpected error occurred';
}
}
// [end:error-handling]
/**
 * Render stored history as "You:"/"AI:" lines, truncating each message to
 * 100 characters. Returns null when there is no history yet.
 */
export async function formatHistory(client: ChatClient): Promise<string | null> {
const history = await client.getHistory();
if (history.length === 0) {
return null;
}
const lines: string[] = [];
for (const msg of history) {
const role = msg.role === 'user' ? 'You' : 'AI';
// Non-string content (e.g. structured parts) is shown as JSON.
const content = typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content);
const displayContent = content.length > 100 ? content.substring(0, 100) + '...' : content;
lines.push(`${role}: ${displayContent}`);
}
return lines.join('\n');
}
/** Print the startup banner listing the basic commands. */
export function printBanner(): void {
console.log(`
${colors.cyan}╭─────────────────────────────────────────────╮
│ ${colors.bright}Meloqui CLI Chat${colors.reset}${colors.cyan} │
│ Type 'exit' to quit, 'clear' to reset │
│ Type 'history' to view conversation │
╰─────────────────────────────────────────────╯${colors.reset}
`);
}
export function printHelp(): void {
console.log(`
${colors.yellow}Commands:${colors.reset}
${colors.bright}exit${colors.reset} - Quit the chatbot
${colors.bright}clear${colors.reset} - Clear conversation history
${colors.bright}history${colors.reset} - Show conversation history
${colors.bright}help${colors.reset} - Show this help message
`);
}

Streaming provides real-time output as the AI generates its response, improving perceived responsiveness.
Error Handling
typescript
/**
 * CLI Chatbot Library
 *
 * Exported functions for the CLI chatbot recipe.
 * Snippet markers allow VitePress to extract code for documentation.
 */
import {
ChatClient,
FileStorage,
ChatError,
RateLimitError,
AuthenticationError
} from '../../../src';
// [start:config]
// Recipe-wide constants: on-disk history directory, the key under which
// this session's conversation is stored, and the OpenAI model to use.
export const STORAGE_DIR = './.chat-history';
export const CONVERSATION_ID = 'cli-session';
export const MODEL = 'gpt-4o-mini';
// [end:config]
// [start:colors]
// ANSI escape sequences used to style terminal output.
export const colors = {
reset: '\x1b[0m',
bright: '\x1b[1m',
dim: '\x1b[2m',
cyan: '\x1b[36m',
green: '\x1b[32m',
yellow: '\x1b[33m',
red: '\x1b[31m',
gray: '\x1b[90m'
};
// [end:colors]
// [start:create-client]
/**
 * Create a ChatClient that persists this session's conversation to disk
 * via FileStorage, so context survives across restarts.
 *
 * @param apiKey - OpenAI API key used to authenticate requests.
 */
export function createChatClient(apiKey: string): ChatClient {
const storage = new FileStorage(STORAGE_DIR);
return new ChatClient({
provider: 'openai',
model: MODEL,
apiKey,
storage,
conversationId: CONVERSATION_ID
});
}
// [end:create-client]
// [start:streaming]
/**
 * Stream the model's reply to `message`, invoking `onChunk` with each
 * chunk's text as it arrives so the caller can render in real time.
 */
export async function streamResponse(
client: ChatClient,
message: string,
onChunk: (content: string) => void
): Promise<void> {
for await (const chunk of client.stream(message)) {
onChunk(chunk.content);
}
}
// [end:streaming]
// [start:error-handling]
/**
 * Translate a thrown value into a user-friendly message.
 * The most specific library error classes are checked first; the order of
 * the instanceof checks matters if these errors subclass one another.
 */
export function handleError(error: unknown): string {
if (error instanceof AuthenticationError) {
return 'Authentication failed. Check your API key.';
} else if (error instanceof RateLimitError) {
return 'Rate limited. Please wait a moment and try again.';
} else if (error instanceof ChatError) {
return `Error: ${error.message}`;
} else if (error instanceof Error) {
return `Error: ${error.message}`;
} else {
return 'An unexpected error occurred';
}
}
// [end:error-handling]
/**
 * Render stored history as "You:"/"AI:" lines, truncating each message to
 * 100 characters. Returns null when there is no history yet.
 */
export async function formatHistory(client: ChatClient): Promise<string | null> {
const history = await client.getHistory();
if (history.length === 0) {
return null;
}
const lines: string[] = [];
for (const msg of history) {
const role = msg.role === 'user' ? 'You' : 'AI';
// Non-string content (e.g. structured parts) is shown as JSON.
const content = typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content);
const displayContent = content.length > 100 ? content.substring(0, 100) + '...' : content;
lines.push(`${role}: ${displayContent}`);
}
return lines.join('\n');
}
/** Print the startup banner listing the basic commands. */
export function printBanner(): void {
console.log(`
${colors.cyan}╭─────────────────────────────────────────────╮
│ ${colors.bright}Meloqui CLI Chat${colors.reset}${colors.cyan} │
│ Type 'exit' to quit, 'clear' to reset │
│ Type 'history' to view conversation │
╰─────────────────────────────────────────────╯${colors.reset}
`);
}
export function printHelp(): void {
console.log(`
${colors.yellow}Commands:${colors.reset}
${colors.bright}exit${colors.reset} - Quit the chatbot
${colors.bright}clear${colors.reset} - Clear conversation history
${colors.bright}history${colors.reset} - Show conversation history
${colors.bright}help${colors.reset} - Show this help message
`);
}

Different error types allow targeted handling and user-friendly messages.
Graceful Shutdown
typescript
// Interactive line reader: prompts on stdout, reads user input from stdin.
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
rl.on('close', () => {
console.log('Goodbye!');
process.exit(0);
});

Handle Ctrl+C gracefully to ensure a clean exit.
Customization
Change the Model
typescript
const client = new ChatClient({
provider: 'anthropic',
model: 'claude-3-haiku-20240307',
// ...
});

Use Ollama (Local)
typescript
// Point the client at a locally running Ollama server instead of OpenAI.
const client = new ChatClient({
provider: 'ollama',
model: 'llama3',
// No API key needed for local models
});