Document Q&A
A document question-answering system using RAG (Retrieval-Augmented Generation).
Features
- RAG - Index documents and retrieve relevant context
- Streaming - Real-time answer display
- Citations - Answers reference source documents
- Error Handling - Graceful failure recovery
Quick Start
bash
export OPENAI_API_KEY=your_key
npm run recipe:doc-qa -- ./docs

How It Works
This recipe builds a document Q&A system using RAG (Retrieval-Augmented Generation) to answer questions based on your local documents.
Supported formats:
- Markdown (`.md`)
- Text files (`.txt`)
Flow:
- Scan directory for documents
- Split documents into chunks and generate embeddings
- Store embeddings in memory for similarity search
- User asks a question
- Find relevant chunks via semantic search
- Send question + context to LLM for answer generation
- Display answer with source citations
Architecture
Code Walkthrough
Configuration
Default configuration values used for document chunking and RAG:
typescript
/**
* Document Q&A Library
*
* Exported functions for the Document Q&A recipe.
* Snippet markers allow VitePress to extract code for documentation.
*/
import * as fs from 'fs';
import * as path from 'path';
import {
ChatClient,
OpenAIEmbeddings,
InMemoryDocumentStore,
ChatError,
RateLimitError,
AuthenticationError
} from '../../../src';
// [start:config]
// File extensions eligible for indexing (extensions are lower-cased before comparison).
export const SUPPORTED_EXTENSIONS = ['.md', '.txt'];
// Maximum characters per document chunk.
export const CHUNK_SIZE = 500;
// Characters shared between consecutive chunks so context isn't lost at boundaries.
export const CHUNK_OVERLAP = 50;
// Number of most-similar chunks retrieved per question.
export const TOP_K = 3;
// OpenAI chat model used for answer generation.
export const MODEL = 'gpt-4o-mini';
// [end:config]
// [start:colors]
// ANSI escape sequences for terminal styling (reset restores defaults).
export const colors = {
  reset: '\x1b[0m',
  bright: '\x1b[1m',
  dim: '\x1b[2m',
  cyan: '\x1b[36m',
  green: '\x1b[32m',
  yellow: '\x1b[33m',
  red: '\x1b[31m',
  gray: '\x1b[90m'
};
// [end:colors]
/** Summary of one successfully indexed file. */
export interface IndexedFile {
  /** Path relative to the indexing base directory. */
  name: string;
  /** Number of chunks this file contributed to the document store. */
  chunks: number;
}
// [start:create-embeddings]
/**
 * Build an OpenAI embeddings provider from an API key.
 *
 * @param apiKey - OpenAI API key used for embedding requests.
 * @returns A configured OpenAIEmbeddings instance.
 */
export function createEmbeddings(apiKey: string): OpenAIEmbeddings {
  const provider = new OpenAIEmbeddings({ apiKey });
  return provider;
}
// [end:create-embeddings]
// [start:create-document-store]
/**
 * Build an in-memory document store wired to the given embeddings provider.
 * Chunking behaviour comes from the module-level CHUNK_SIZE / CHUNK_OVERLAP.
 *
 * @param embeddings - Provider used to embed document chunks.
 * @returns A store that chunks, embeds, and holds documents in memory.
 */
export function createDocumentStore(embeddings: OpenAIEmbeddings): InMemoryDocumentStore {
  const options = {
    embeddingProvider: embeddings,
    chunkSize: CHUNK_SIZE,
    chunkOverlap: CHUNK_OVERLAP
  };
  return new InMemoryDocumentStore(options);
}
// [end:create-document-store]
// [start:create-rag-client]
/**
 * Build a ChatClient configured for retrieval-augmented generation:
 * the TOP_K chunks most similar to each question are pulled from the
 * document store and supplied to the model as context.
 *
 * @param apiKey - OpenAI API key.
 * @param documentStore - Store holding the indexed document chunks.
 * @returns A RAG-enabled chat client using the module-level MODEL.
 */
export function createRAGClient(apiKey: string, documentStore: InMemoryDocumentStore): ChatClient {
  const ragOptions = { topK: TOP_K };
  return new ChatClient({
    provider: 'openai',
    model: MODEL,
    apiKey,
    documentStore,
    ragOptions
  });
}
// [end:create-rag-client]
// [start:error-handling]
/**
 * Map a thrown value to a user-facing message.
 *
 * The most specific error classes are checked first; anything that is a
 * ChatError or a plain Error falls through to a generic "Error: …" message.
 *
 * @param error - Any thrown value (typed unknown per strict catch semantics).
 * @returns A short human-readable description of the failure.
 */
export function handleError(error: unknown): string {
  if (error instanceof AuthenticationError) {
    return 'Authentication failed. Check your API key.';
  }
  if (error instanceof RateLimitError) {
    return 'Rate limited. Please wait a moment and try again.';
  }
  // Both branches produced the same message in the original chain,
  // so ChatError and Error are handled together.
  if (error instanceof ChatError || error instanceof Error) {
    return `Error: ${error.message}`;
  }
  return 'An unexpected error occurred';
}
// [end:error-handling]
/**
 * Collect all supported document files under a directory (recursively).
 *
 * Hidden directories (dot-prefixed) and node_modules are skipped. Errors
 * from unreadable directories propagate to the caller.
 *
 * @param directory - Root directory to scan.
 * @returns Sorted list of matching file paths.
 */
export async function findDocuments(directory: string): Promise<string[]> {
  const found: string[] = [];
  const pending: string[] = [directory];
  let current: string | undefined;
  // Iterative worklist traversal; output order is normalized by the final sort.
  while ((current = pending.pop()) !== undefined) {
    const entries = await fs.promises.readdir(current, { withFileTypes: true });
    for (const entry of entries) {
      const entryPath = path.join(current, entry.name);
      if (entry.isDirectory()) {
        const skip = entry.name.startsWith('.') || entry.name === 'node_modules';
        if (!skip) {
          pending.push(entryPath);
        }
      } else if (entry.isFile()) {
        const extension = path.extname(entry.name).toLowerCase();
        if (SUPPORTED_EXTENSIONS.includes(extension)) {
          found.push(entryPath);
        }
      }
    }
  }
  return found.sort();
}
// [start:index-documents]
/**
 * Read each file, add its content to the document store, and report progress.
 *
 * @param documentStore - Store that chunks and embeds added documents.
 * @param files - File paths to index.
 * @param baseDir - Base directory used to compute display-friendly relative names.
 * @param onProgress - Optional per-file callback with status
 *   ('success' | 'empty' | 'error') and, on success, the chunks added.
 * @returns Successfully indexed files with their chunk counts.
 */
export async function indexDocuments(
  documentStore: InMemoryDocumentStore,
  files: string[],
  baseDir: string,
  onProgress?: (file: string, status: 'success' | 'empty' | 'error', chunks?: number) => void
): Promise<IndexedFile[]> {
  const indexed: IndexedFile[] = [];
  for (const filePath of files) {
    const relativePath = path.relative(baseDir, filePath);
    try {
      const content = await fs.promises.readFile(filePath, 'utf-8');
      // Whitespace-only files are skipped and reported as 'empty'.
      if (!content.trim()) {
        onProgress?.(relativePath, 'empty');
        continue;
      }
      // The chunkCount delta tells us how many chunks this document produced.
      const chunkCountBefore = documentStore.chunkCount;
      await documentStore.addDocuments([
        {
          content,
          source: relativePath,
          metadata: {
            path: filePath,
            extension: path.extname(filePath)
          }
        }
      ]);
      const chunksAdded = documentStore.chunkCount - chunkCountBefore;
      indexed.push({ name: relativePath, chunks: chunksAdded });
      onProgress?.(relativePath, 'success', chunksAdded);
    } catch {
      // Deliberate best-effort: a failed read/index is reported via
      // onProgress and indexing continues with the next file.
      onProgress?.(relativePath, 'error');
    }
  }
  return indexed;
}
// [end:index-documents]
/**
 * Stream a chat response, forwarding each chunk's content to the callback.
 *
 * @param client - Chat client to query.
 * @param message - User message to send.
 * @param onChunk - Invoked with each streamed content fragment as it arrives.
 */
export async function streamResponse(
  client: ChatClient,
  message: string,
  onChunk: (content: string) => void
): Promise<void> {
  const stream = client.stream(message);
  for await (const chunk of stream) {
    onChunk(chunk.content);
  }
}
/** Print the interactive-session welcome banner (box-drawn, ANSI-colored). */
export function printBanner(): void {
console.log(`
${colors.cyan}╭─────────────────────────────────────────────╮
│ ${colors.bright}Meloqui Document Q&A${colors.reset}${colors.cyan} │
│ Ask questions about your documents │
│ Type 'exit' to quit │
╰─────────────────────────────────────────────╯${colors.reset}
`);
}
/** Print CLI usage, an example invocation, and the supported file extensions. */
export function printUsage(): void {
console.log(`
${colors.yellow}Usage:${colors.reset}
npm run recipe:doc-qa -- <directory>
${colors.yellow}Example:${colors.reset}
npm run recipe:doc-qa -- ./docs
${colors.yellow}Supported files:${colors.reset}
${SUPPORTED_EXTENSIONS.join(', ')}
`);
}

Create Embeddings
Create an OpenAI embeddings provider for generating document vectors:
typescript
/**
* Document Q&A Library
*
* Exported functions for the Document Q&A recipe.
* Snippet markers allow VitePress to extract code for documentation.
*/
import * as fs from 'fs';
import * as path from 'path';
import {
ChatClient,
OpenAIEmbeddings,
InMemoryDocumentStore,
ChatError,
RateLimitError,
AuthenticationError
} from '../../../src';
// [start:config]
export const SUPPORTED_EXTENSIONS = ['.md', '.txt'];
export const CHUNK_SIZE = 500;
export const CHUNK_OVERLAP = 50;
export const TOP_K = 3;
export const MODEL = 'gpt-4o-mini';
// [end:config]
// [start:colors]
export const colors = {
reset: '\x1b[0m',
bright: '\x1b[1m',
dim: '\x1b[2m',
cyan: '\x1b[36m',
green: '\x1b[32m',
yellow: '\x1b[33m',
red: '\x1b[31m',
gray: '\x1b[90m'
};
// [end:colors]
export interface IndexedFile {
name: string;
chunks: number;
}
// [start:create-embeddings]
export function createEmbeddings(apiKey: string): OpenAIEmbeddings {
return new OpenAIEmbeddings({ apiKey });
}
// [end:create-embeddings]
// [start:create-document-store]
export function createDocumentStore(embeddings: OpenAIEmbeddings): InMemoryDocumentStore {
return new InMemoryDocumentStore({
embeddingProvider: embeddings,
chunkSize: CHUNK_SIZE,
chunkOverlap: CHUNK_OVERLAP
});
}
// [end:create-document-store]
// [start:create-rag-client]
export function createRAGClient(apiKey: string, documentStore: InMemoryDocumentStore): ChatClient {
return new ChatClient({
provider: 'openai',
model: MODEL,
apiKey,
documentStore,
ragOptions: {
topK: TOP_K
}
});
}
// [end:create-rag-client]
// [start:error-handling]
export function handleError(error: unknown): string {
if (error instanceof AuthenticationError) {
return 'Authentication failed. Check your API key.';
} else if (error instanceof RateLimitError) {
return 'Rate limited. Please wait a moment and try again.';
} else if (error instanceof ChatError) {
return `Error: ${error.message}`;
} else if (error instanceof Error) {
return `Error: ${error.message}`;
} else {
return 'An unexpected error occurred';
}
}
// [end:error-handling]
export async function findDocuments(directory: string): Promise<string[]> {
const files: string[] = [];
async function scanDir(dir: string): Promise<void> {
const entries = await fs.promises.readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
if (!entry.name.startsWith('.') && entry.name !== 'node_modules') {
await scanDir(fullPath);
}
} else if (entry.isFile()) {
const ext = path.extname(entry.name).toLowerCase();
if (SUPPORTED_EXTENSIONS.includes(ext)) {
files.push(fullPath);
}
}
}
}
await scanDir(directory);
return files.sort();
}
// [start:index-documents]
export async function indexDocuments(
documentStore: InMemoryDocumentStore,
files: string[],
baseDir: string,
onProgress?: (file: string, status: 'success' | 'empty' | 'error', chunks?: number) => void
): Promise<IndexedFile[]> {
const indexed: IndexedFile[] = [];
for (const filePath of files) {
const relativePath = path.relative(baseDir, filePath);
try {
const content = await fs.promises.readFile(filePath, 'utf-8');
if (!content.trim()) {
onProgress?.(relativePath, 'empty');
continue;
}
const chunkCountBefore = documentStore.chunkCount;
await documentStore.addDocuments([
{
content,
source: relativePath,
metadata: {
path: filePath,
extension: path.extname(filePath)
}
}
]);
const chunksAdded = documentStore.chunkCount - chunkCountBefore;
indexed.push({ name: relativePath, chunks: chunksAdded });
onProgress?.(relativePath, 'success', chunksAdded);
} catch {
onProgress?.(relativePath, 'error');
}
}
return indexed;
}
// [end:index-documents]
export async function streamResponse(
client: ChatClient,
message: string,
onChunk: (content: string) => void
): Promise<void> {
for await (const chunk of client.stream(message)) {
onChunk(chunk.content);
}
}
export function printBanner(): void {
console.log(`
${colors.cyan}╭─────────────────────────────────────────────╮
│ ${colors.bright}Meloqui Document Q&A${colors.reset}${colors.cyan} │
│ Ask questions about your documents │
│ Type 'exit' to quit │
╰─────────────────────────────────────────────╯${colors.reset}
`);
}
export function printUsage(): void {
console.log(`
${colors.yellow}Usage:${colors.reset}
npm run recipe:doc-qa -- <directory>
${colors.yellow}Example:${colors.reset}
npm run recipe:doc-qa -- ./docs
${colors.yellow}Supported files:${colors.reset}
${SUPPORTED_EXTENSIONS.join(', ')}
`);
}

Create Document Store
Create an in-memory document store with chunking configuration:
typescript
/**
* Document Q&A Library
*
* Exported functions for the Document Q&A recipe.
* Snippet markers allow VitePress to extract code for documentation.
*/
import * as fs from 'fs';
import * as path from 'path';
import {
ChatClient,
OpenAIEmbeddings,
InMemoryDocumentStore,
ChatError,
RateLimitError,
AuthenticationError
} from '../../../src';
// [start:config]
export const SUPPORTED_EXTENSIONS = ['.md', '.txt'];
export const CHUNK_SIZE = 500;
export const CHUNK_OVERLAP = 50;
export const TOP_K = 3;
export const MODEL = 'gpt-4o-mini';
// [end:config]
// [start:colors]
export const colors = {
reset: '\x1b[0m',
bright: '\x1b[1m',
dim: '\x1b[2m',
cyan: '\x1b[36m',
green: '\x1b[32m',
yellow: '\x1b[33m',
red: '\x1b[31m',
gray: '\x1b[90m'
};
// [end:colors]
export interface IndexedFile {
name: string;
chunks: number;
}
// [start:create-embeddings]
export function createEmbeddings(apiKey: string): OpenAIEmbeddings {
return new OpenAIEmbeddings({ apiKey });
}
// [end:create-embeddings]
// [start:create-document-store]
export function createDocumentStore(embeddings: OpenAIEmbeddings): InMemoryDocumentStore {
return new InMemoryDocumentStore({
embeddingProvider: embeddings,
chunkSize: CHUNK_SIZE,
chunkOverlap: CHUNK_OVERLAP
});
}
// [end:create-document-store]
// [start:create-rag-client]
export function createRAGClient(apiKey: string, documentStore: InMemoryDocumentStore): ChatClient {
return new ChatClient({
provider: 'openai',
model: MODEL,
apiKey,
documentStore,
ragOptions: {
topK: TOP_K
}
});
}
// [end:create-rag-client]
// [start:error-handling]
export function handleError(error: unknown): string {
if (error instanceof AuthenticationError) {
return 'Authentication failed. Check your API key.';
} else if (error instanceof RateLimitError) {
return 'Rate limited. Please wait a moment and try again.';
} else if (error instanceof ChatError) {
return `Error: ${error.message}`;
} else if (error instanceof Error) {
return `Error: ${error.message}`;
} else {
return 'An unexpected error occurred';
}
}
// [end:error-handling]
export async function findDocuments(directory: string): Promise<string[]> {
const files: string[] = [];
async function scanDir(dir: string): Promise<void> {
const entries = await fs.promises.readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
if (!entry.name.startsWith('.') && entry.name !== 'node_modules') {
await scanDir(fullPath);
}
} else if (entry.isFile()) {
const ext = path.extname(entry.name).toLowerCase();
if (SUPPORTED_EXTENSIONS.includes(ext)) {
files.push(fullPath);
}
}
}
}
await scanDir(directory);
return files.sort();
}
// [start:index-documents]
export async function indexDocuments(
documentStore: InMemoryDocumentStore,
files: string[],
baseDir: string,
onProgress?: (file: string, status: 'success' | 'empty' | 'error', chunks?: number) => void
): Promise<IndexedFile[]> {
const indexed: IndexedFile[] = [];
for (const filePath of files) {
const relativePath = path.relative(baseDir, filePath);
try {
const content = await fs.promises.readFile(filePath, 'utf-8');
if (!content.trim()) {
onProgress?.(relativePath, 'empty');
continue;
}
const chunkCountBefore = documentStore.chunkCount;
await documentStore.addDocuments([
{
content,
source: relativePath,
metadata: {
path: filePath,
extension: path.extname(filePath)
}
}
]);
const chunksAdded = documentStore.chunkCount - chunkCountBefore;
indexed.push({ name: relativePath, chunks: chunksAdded });
onProgress?.(relativePath, 'success', chunksAdded);
} catch {
onProgress?.(relativePath, 'error');
}
}
return indexed;
}
// [end:index-documents]
export async function streamResponse(
client: ChatClient,
message: string,
onChunk: (content: string) => void
): Promise<void> {
for await (const chunk of client.stream(message)) {
onChunk(chunk.content);
}
}
export function printBanner(): void {
console.log(`
${colors.cyan}╭─────────────────────────────────────────────╮
│ ${colors.bright}Meloqui Document Q&A${colors.reset}${colors.cyan} │
│ Ask questions about your documents │
│ Type 'exit' to quit │
╰─────────────────────────────────────────────╯${colors.reset}
`);
}
export function printUsage(): void {
console.log(`
${colors.yellow}Usage:${colors.reset}
npm run recipe:doc-qa -- <directory>
${colors.yellow}Example:${colors.reset}
npm run recipe:doc-qa -- ./docs
${colors.yellow}Supported files:${colors.reset}
${SUPPORTED_EXTENSIONS.join(', ')}
`);
}

Create RAG Client
Create a ChatClient configured with the document store for RAG:
typescript
/**
* Document Q&A Library
*
* Exported functions for the Document Q&A recipe.
* Snippet markers allow VitePress to extract code for documentation.
*/
import * as fs from 'fs';
import * as path from 'path';
import {
ChatClient,
OpenAIEmbeddings,
InMemoryDocumentStore,
ChatError,
RateLimitError,
AuthenticationError
} from '../../../src';
// [start:config]
export const SUPPORTED_EXTENSIONS = ['.md', '.txt'];
export const CHUNK_SIZE = 500;
export const CHUNK_OVERLAP = 50;
export const TOP_K = 3;
export const MODEL = 'gpt-4o-mini';
// [end:config]
// [start:colors]
export const colors = {
reset: '\x1b[0m',
bright: '\x1b[1m',
dim: '\x1b[2m',
cyan: '\x1b[36m',
green: '\x1b[32m',
yellow: '\x1b[33m',
red: '\x1b[31m',
gray: '\x1b[90m'
};
// [end:colors]
export interface IndexedFile {
name: string;
chunks: number;
}
// [start:create-embeddings]
export function createEmbeddings(apiKey: string): OpenAIEmbeddings {
return new OpenAIEmbeddings({ apiKey });
}
// [end:create-embeddings]
// [start:create-document-store]
export function createDocumentStore(embeddings: OpenAIEmbeddings): InMemoryDocumentStore {
return new InMemoryDocumentStore({
embeddingProvider: embeddings,
chunkSize: CHUNK_SIZE,
chunkOverlap: CHUNK_OVERLAP
});
}
// [end:create-document-store]
// [start:create-rag-client]
export function createRAGClient(apiKey: string, documentStore: InMemoryDocumentStore): ChatClient {
return new ChatClient({
provider: 'openai',
model: MODEL,
apiKey,
documentStore,
ragOptions: {
topK: TOP_K
}
});
}
// [end:create-rag-client]
// [start:error-handling]
export function handleError(error: unknown): string {
if (error instanceof AuthenticationError) {
return 'Authentication failed. Check your API key.';
} else if (error instanceof RateLimitError) {
return 'Rate limited. Please wait a moment and try again.';
} else if (error instanceof ChatError) {
return `Error: ${error.message}`;
} else if (error instanceof Error) {
return `Error: ${error.message}`;
} else {
return 'An unexpected error occurred';
}
}
// [end:error-handling]
export async function findDocuments(directory: string): Promise<string[]> {
const files: string[] = [];
async function scanDir(dir: string): Promise<void> {
const entries = await fs.promises.readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
if (!entry.name.startsWith('.') && entry.name !== 'node_modules') {
await scanDir(fullPath);
}
} else if (entry.isFile()) {
const ext = path.extname(entry.name).toLowerCase();
if (SUPPORTED_EXTENSIONS.includes(ext)) {
files.push(fullPath);
}
}
}
}
await scanDir(directory);
return files.sort();
}
// [start:index-documents]
export async function indexDocuments(
documentStore: InMemoryDocumentStore,
files: string[],
baseDir: string,
onProgress?: (file: string, status: 'success' | 'empty' | 'error', chunks?: number) => void
): Promise<IndexedFile[]> {
const indexed: IndexedFile[] = [];
for (const filePath of files) {
const relativePath = path.relative(baseDir, filePath);
try {
const content = await fs.promises.readFile(filePath, 'utf-8');
if (!content.trim()) {
onProgress?.(relativePath, 'empty');
continue;
}
const chunkCountBefore = documentStore.chunkCount;
await documentStore.addDocuments([
{
content,
source: relativePath,
metadata: {
path: filePath,
extension: path.extname(filePath)
}
}
]);
const chunksAdded = documentStore.chunkCount - chunkCountBefore;
indexed.push({ name: relativePath, chunks: chunksAdded });
onProgress?.(relativePath, 'success', chunksAdded);
} catch {
onProgress?.(relativePath, 'error');
}
}
return indexed;
}
// [end:index-documents]
export async function streamResponse(
client: ChatClient,
message: string,
onChunk: (content: string) => void
): Promise<void> {
for await (const chunk of client.stream(message)) {
onChunk(chunk.content);
}
}
export function printBanner(): void {
console.log(`
${colors.cyan}╭─────────────────────────────────────────────╮
│ ${colors.bright}Meloqui Document Q&A${colors.reset}${colors.cyan} │
│ Ask questions about your documents │
│ Type 'exit' to quit │
╰─────────────────────────────────────────────╯${colors.reset}
`);
}
export function printUsage(): void {
console.log(`
${colors.yellow}Usage:${colors.reset}
npm run recipe:doc-qa -- <directory>
${colors.yellow}Example:${colors.reset}
npm run recipe:doc-qa -- ./docs
${colors.yellow}Supported files:${colors.reset}
${SUPPORTED_EXTENSIONS.join(', ')}
`);
}

Index Documents
Index documents from files with progress tracking:
typescript
/**
* Document Q&A Library
*
* Exported functions for the Document Q&A recipe.
* Snippet markers allow VitePress to extract code for documentation.
*/
import * as fs from 'fs';
import * as path from 'path';
import {
ChatClient,
OpenAIEmbeddings,
InMemoryDocumentStore,
ChatError,
RateLimitError,
AuthenticationError
} from '../../../src';
// [start:config]
export const SUPPORTED_EXTENSIONS = ['.md', '.txt'];
export const CHUNK_SIZE = 500;
export const CHUNK_OVERLAP = 50;
export const TOP_K = 3;
export const MODEL = 'gpt-4o-mini';
// [end:config]
// [start:colors]
export const colors = {
reset: '\x1b[0m',
bright: '\x1b[1m',
dim: '\x1b[2m',
cyan: '\x1b[36m',
green: '\x1b[32m',
yellow: '\x1b[33m',
red: '\x1b[31m',
gray: '\x1b[90m'
};
// [end:colors]
export interface IndexedFile {
name: string;
chunks: number;
}
// [start:create-embeddings]
export function createEmbeddings(apiKey: string): OpenAIEmbeddings {
return new OpenAIEmbeddings({ apiKey });
}
// [end:create-embeddings]
// [start:create-document-store]
export function createDocumentStore(embeddings: OpenAIEmbeddings): InMemoryDocumentStore {
return new InMemoryDocumentStore({
embeddingProvider: embeddings,
chunkSize: CHUNK_SIZE,
chunkOverlap: CHUNK_OVERLAP
});
}
// [end:create-document-store]
// [start:create-rag-client]
export function createRAGClient(apiKey: string, documentStore: InMemoryDocumentStore): ChatClient {
return new ChatClient({
provider: 'openai',
model: MODEL,
apiKey,
documentStore,
ragOptions: {
topK: TOP_K
}
});
}
// [end:create-rag-client]
// [start:error-handling]
export function handleError(error: unknown): string {
if (error instanceof AuthenticationError) {
return 'Authentication failed. Check your API key.';
} else if (error instanceof RateLimitError) {
return 'Rate limited. Please wait a moment and try again.';
} else if (error instanceof ChatError) {
return `Error: ${error.message}`;
} else if (error instanceof Error) {
return `Error: ${error.message}`;
} else {
return 'An unexpected error occurred';
}
}
// [end:error-handling]
export async function findDocuments(directory: string): Promise<string[]> {
const files: string[] = [];
async function scanDir(dir: string): Promise<void> {
const entries = await fs.promises.readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
if (!entry.name.startsWith('.') && entry.name !== 'node_modules') {
await scanDir(fullPath);
}
} else if (entry.isFile()) {
const ext = path.extname(entry.name).toLowerCase();
if (SUPPORTED_EXTENSIONS.includes(ext)) {
files.push(fullPath);
}
}
}
}
await scanDir(directory);
return files.sort();
}
// [start:index-documents]
export async function indexDocuments(
documentStore: InMemoryDocumentStore,
files: string[],
baseDir: string,
onProgress?: (file: string, status: 'success' | 'empty' | 'error', chunks?: number) => void
): Promise<IndexedFile[]> {
const indexed: IndexedFile[] = [];
for (const filePath of files) {
const relativePath = path.relative(baseDir, filePath);
try {
const content = await fs.promises.readFile(filePath, 'utf-8');
if (!content.trim()) {
onProgress?.(relativePath, 'empty');
continue;
}
const chunkCountBefore = documentStore.chunkCount;
await documentStore.addDocuments([
{
content,
source: relativePath,
metadata: {
path: filePath,
extension: path.extname(filePath)
}
}
]);
const chunksAdded = documentStore.chunkCount - chunkCountBefore;
indexed.push({ name: relativePath, chunks: chunksAdded });
onProgress?.(relativePath, 'success', chunksAdded);
} catch {
onProgress?.(relativePath, 'error');
}
}
return indexed;
}
// [end:index-documents]
export async function streamResponse(
client: ChatClient,
message: string,
onChunk: (content: string) => void
): Promise<void> {
for await (const chunk of client.stream(message)) {
onChunk(chunk.content);
}
}
export function printBanner(): void {
console.log(`
${colors.cyan}╭─────────────────────────────────────────────╮
│ ${colors.bright}Meloqui Document Q&A${colors.reset}${colors.cyan} │
│ Ask questions about your documents │
│ Type 'exit' to quit │
╰─────────────────────────────────────────────╯${colors.reset}
`);
}
export function printUsage(): void {
console.log(`
${colors.yellow}Usage:${colors.reset}
npm run recipe:doc-qa -- <directory>
${colors.yellow}Example:${colors.reset}
npm run recipe:doc-qa -- ./docs
${colors.yellow}Supported files:${colors.reset}
${SUPPORTED_EXTENSIONS.join(', ')}
`);
}

The document store automatically:
- Splits content into chunks
- Generates embeddings for each chunk
- Stores chunks for similarity search
The ChatClient automatically:
- Embeds the user's question
- Searches for similar document chunks
- Includes relevant chunks in the prompt
- Generates an answer with context
Configuration
Chunk Settings
typescript
const documentStore = new InMemoryDocumentStore({
embeddingProvider: embeddings,
chunkSize: 500, // Characters per chunk
chunkOverlap: 50 // Overlap between chunks
});

| Setting | Default | Description |
|---|---|---|
| `chunkSize` | 500 | Maximum characters per chunk |
| `chunkOverlap` | 50 | Characters to overlap between chunks |
RAG Options
typescript
const client = new ChatClient({
documentStore,
ragOptions: {
topK: 3 // Number of chunks to retrieve
}
});

Supported File Types
| Extension | Type |
|---|---|
| `.md` | Markdown |
| `.txt` | Plain text |
Error Handling
Handle different error types gracefully:
typescript
/**
* Document Q&A Library
*
* Exported functions for the Document Q&A recipe.
* Snippet markers allow VitePress to extract code for documentation.
*/
import * as fs from 'fs';
import * as path from 'path';
import {
ChatClient,
OpenAIEmbeddings,
InMemoryDocumentStore,
ChatError,
RateLimitError,
AuthenticationError
} from '../../../src';
// [start:config]
export const SUPPORTED_EXTENSIONS = ['.md', '.txt'];
export const CHUNK_SIZE = 500;
export const CHUNK_OVERLAP = 50;
export const TOP_K = 3;
export const MODEL = 'gpt-4o-mini';
// [end:config]
// [start:colors]
export const colors = {
reset: '\x1b[0m',
bright: '\x1b[1m',
dim: '\x1b[2m',
cyan: '\x1b[36m',
green: '\x1b[32m',
yellow: '\x1b[33m',
red: '\x1b[31m',
gray: '\x1b[90m'
};
// [end:colors]
export interface IndexedFile {
name: string;
chunks: number;
}
// [start:create-embeddings]
export function createEmbeddings(apiKey: string): OpenAIEmbeddings {
return new OpenAIEmbeddings({ apiKey });
}
// [end:create-embeddings]
// [start:create-document-store]
export function createDocumentStore(embeddings: OpenAIEmbeddings): InMemoryDocumentStore {
return new InMemoryDocumentStore({
embeddingProvider: embeddings,
chunkSize: CHUNK_SIZE,
chunkOverlap: CHUNK_OVERLAP
});
}
// [end:create-document-store]
// [start:create-rag-client]
export function createRAGClient(apiKey: string, documentStore: InMemoryDocumentStore): ChatClient {
return new ChatClient({
provider: 'openai',
model: MODEL,
apiKey,
documentStore,
ragOptions: {
topK: TOP_K
}
});
}
// [end:create-rag-client]
// [start:error-handling]
export function handleError(error: unknown): string {
if (error instanceof AuthenticationError) {
return 'Authentication failed. Check your API key.';
} else if (error instanceof RateLimitError) {
return 'Rate limited. Please wait a moment and try again.';
} else if (error instanceof ChatError) {
return `Error: ${error.message}`;
} else if (error instanceof Error) {
return `Error: ${error.message}`;
} else {
return 'An unexpected error occurred';
}
}
// [end:error-handling]
export async function findDocuments(directory: string): Promise<string[]> {
const files: string[] = [];
async function scanDir(dir: string): Promise<void> {
const entries = await fs.promises.readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
if (!entry.name.startsWith('.') && entry.name !== 'node_modules') {
await scanDir(fullPath);
}
} else if (entry.isFile()) {
const ext = path.extname(entry.name).toLowerCase();
if (SUPPORTED_EXTENSIONS.includes(ext)) {
files.push(fullPath);
}
}
}
}
await scanDir(directory);
return files.sort();
}
// [start:index-documents]
export async function indexDocuments(
documentStore: InMemoryDocumentStore,
files: string[],
baseDir: string,
onProgress?: (file: string, status: 'success' | 'empty' | 'error', chunks?: number) => void
): Promise<IndexedFile[]> {
const indexed: IndexedFile[] = [];
for (const filePath of files) {
const relativePath = path.relative(baseDir, filePath);
try {
const content = await fs.promises.readFile(filePath, 'utf-8');
if (!content.trim()) {
onProgress?.(relativePath, 'empty');
continue;
}
const chunkCountBefore = documentStore.chunkCount;
await documentStore.addDocuments([
{
content,
source: relativePath,
metadata: {
path: filePath,
extension: path.extname(filePath)
}
}
]);
const chunksAdded = documentStore.chunkCount - chunkCountBefore;
indexed.push({ name: relativePath, chunks: chunksAdded });
onProgress?.(relativePath, 'success', chunksAdded);
} catch {
onProgress?.(relativePath, 'error');
}
}
return indexed;
}
// [end:index-documents]
export async function streamResponse(
client: ChatClient,
message: string,
onChunk: (content: string) => void
): Promise<void> {
for await (const chunk of client.stream(message)) {
onChunk(chunk.content);
}
}
export function printBanner(): void {
console.log(`
${colors.cyan}╭─────────────────────────────────────────────╮
│ ${colors.bright}Meloqui Document Q&A${colors.reset}${colors.cyan} │
│ Ask questions about your documents │
│ Type 'exit' to quit │
╰─────────────────────────────────────────────╯${colors.reset}
`);
}
export function printUsage(): void {
console.log(`
${colors.yellow}Usage:${colors.reset}
npm run recipe:doc-qa -- <directory>
${colors.yellow}Example:${colors.reset}
npm run recipe:doc-qa -- ./docs
${colors.yellow}Supported files:${colors.reset}
${SUPPORTED_EXTENSIONS.join(', ')}
`);
}