Skip to main content

Overview

The Chatbot module provides a Q&A interface that answers user questions using documents stored in a dedicated workspace. It combines conversational AI with RAG (Retrieval-Augmented Generation) to provide accurate, document-based answers. Use Case: Build a chatbot that answers questions based on your company’s knowledge base (policies, FAQs, product documentation, etc.)
Note: The examples in this documentation use a local Flask web server. Be aware of this if you want to deploy to production.
Chatbot Interface

How It Works

  1. User sends a message
  2. System builds context from conversation history
  3. Query is sent to the Document Search API targeting the chatbot workspace
  4. LLM generates answer based on relevant document content
  5. Conversation is stored for context in future messages

API Endpoints Used

| Endpoint | Purpose |
| --- | --- |
| `POST /chat/document-search` | Query documents and generate answers |

Step-by-Step Implementation

Step 1: Initialize the Service

import uuid
from typing import Dict, Any, Optional, List

import requests

class ChatbotService:
    """Q&A service answering questions from documents in a Paradigm workspace.

    Combines the Document Search API (RAG) with in-memory per-conversation
    history so follow-up questions are answered with context.
    """

    def __init__(
        self,
        api_key: str,
        workspace_id: int = 250,
        model: str = "alfred-4.2",
        document_search_url: str = "https://paradigm.lighton.ai/api/v2/chat/document-search",
    ):
        """
        Args:
            api_key: Bearer token for the Paradigm API.
            workspace_id: ID of the workspace holding the chatbot documents.
            model: LLM model used to generate answers.
            document_search_url: Document Search endpoint URL.
        """
        self.api_key = api_key
        self.headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json"
        }
        # In-memory only: replace with a persistent store (DB, Redis, ...)
        # in production — this is lost on restart and not shared across workers.
        self.conversations = {}

        # Configuration (now injectable instead of hard-coded)
        self.document_search_url = document_search_url
        self.workspace_id = workspace_id
        self.model = model

Step 2: Build Conversation Context

def _build_context(
    self,
    message: str,
    conversation_id: Optional[str],
    system_prompt: Optional[str]
) -> str:
    """Assemble the textual prefix sent ahead of the current question.

    The prefix is the optional system prompt followed by a transcript of
    the stored conversation history (if any) for *conversation_id*.

    Note: *message* is accepted for interface symmetry with chat() but is
    not included here — the caller appends the current question itself.
    """
    parts = []

    if system_prompt:
        parts.append(f"{system_prompt}\n\n")

    # A known conversation ID contributes its transcript, even if empty.
    if conversation_id and conversation_id in self.conversations:
        transcript = "".join(
            f"{turn['role'].capitalize()}: {turn['content']}\n"
            for turn in self.conversations[conversation_id]
        )
        parts.append(f"Previous conversation:\n{transcript}\n")

    return "".join(parts)

Step 3: Send Chat Message

def chat(
    self,
    message: str,
    conversation_id: Optional[str] = None,
    system_prompt: Optional[str] = None,
    temperature: float = 0.7
) -> Dict[str, Any]:
    """
    Send chat message and get response using document search.

    Args:
        message: User's question
        conversation_id: Optional ID for conversation continuity
        system_prompt: Optional instructions for the assistant
        temperature: Response creativity (0.0-1.0). NOTE(review): currently
            accepted but not forwarded in the payload — confirm whether the
            Document Search API supports it before wiring it through.

    Returns:
        Response with answer and conversation_id

    Raises:
        Exception: if the Document Search API returns a non-200 status.
    """
    # Build context from history, then prepend it to the current question.
    context = self._build_context(message, conversation_id, system_prompt)
    query = f"{context}Current question: {message}"

    # Payload for the Document Search tool, scoped to the chatbot workspace.
    payload = {
        "model": self.model,
        "query": query,
        "workspace_ids": [self.workspace_id],
        "company_scope": False,
        "private_scope": False,
        "tool": "DocumentSearch"
    }

    response = requests.post(
        self.document_search_url,
        headers=self.headers,
        json=payload,
        timeout=150
    )

    if response.status_code != 200:
        raise Exception(f"Document search failed: {response.text}")

    data = response.json()
    assistant_message = data.get("answer", "")

    # Mint an ID for brand-new conversations.
    if not conversation_id:
        conversation_id = str(uuid.uuid4())

    # Always record the exchange — including for caller-supplied IDs the
    # service has not seen before (the old guard silently dropped those).
    history = self.conversations.setdefault(conversation_id, [])
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": assistant_message})

    return {
        'response': assistant_message,
        'conversation_id': conversation_id,
        'status': 'success',
        'workspace_id': self.workspace_id
    }

Step 4: Manage Conversation History

def get_conversation_history(self, conversation_id: str) -> List[Dict[str, str]]:
    """Return the stored message list for *conversation_id*.

    Raises:
        ValueError: if no conversation exists under that ID.
    """
    try:
        return self.conversations[conversation_id]
    except KeyError:
        raise ValueError(f"Conversation {conversation_id} not found") from None

def clear_conversation(self, conversation_id: str) -> bool:
    """Delete a conversation's stored history.

    Returns:
        True if a conversation was removed, False if the ID was unknown.
    """
    _missing = object()  # sentinel so an empty history still counts as found
    return self.conversations.pop(conversation_id, _missing) is not _missing

Complete Usage Example

# Set up the chatbot service with your API credentials
bot = ChatbotService(api_key="your-api-key")

# Instructions that shape the assistant's persona and answer style
support_prompt = """You are a helpful IT support assistant.
Answer questions based on the company's IT documentation.
Be concise and provide step-by-step instructions when relevant."""

# Opening message — no conversation_id yet, so a new conversation is created
reply = bot.chat(
    message="How do I reset my password?",
    system_prompt=support_prompt
)

print(f"Assistant: {reply['response']}")
conv_id = reply['conversation_id']

# Second turn — passing the ID lets the bot use the earlier exchange as context
reply = bot.chat(
    message="What if I don't receive the reset email?",
    conversation_id=conv_id,
    system_prompt=support_prompt
)

print(f"Assistant: {reply['response']}")

# Dump the whole transcript
for turn in bot.get_conversation_history(conv_id):
    print(f"{turn['role'].upper()}: {turn['content']}")

# Free the in-memory history once finished
bot.clear_conversation(conv_id)

Flask Route Integration

from flask import Blueprint, request, jsonify

# Blueprint grouping all chatbot routes under one registrable unit.
chatbot_bp = Blueprint('chatbot', __name__)
# Module-level singleton used by the routes below; remains None until
# init_chatbot_service() is called during app startup.
chatbot_service = None

def init_chatbot_service(api_key: str):
    """Create the shared ChatbotService instance used by the routes.

    Must be called once (with the API key) before the blueprint handles
    any requests.
    """
    global chatbot_service
    chatbot_service = ChatbotService(api_key)

@chatbot_bp.route('/chat', methods=['POST'])
def chat():
    """Chat endpoint.

    Expects a JSON body with 'message' (required) and optional
    'conversation_id' and 'system_prompt'. Returns the service's answer,
    or 400 for missing/invalid input, or 500 on upstream failure.
    """
    # silent=True yields None (not an exception/415) for malformed or
    # non-JSON bodies, so those requests get the intended 400 below
    # instead of an unhandled error.
    data = request.get_json(silent=True) or {}

    message = data.get('message')
    conversation_id = data.get('conversation_id')
    system_prompt = data.get('system_prompt')

    if not message:
        return jsonify({'error': 'Message is required'}), 400

    try:
        result = chatbot_service.chat(
            message=message,
            conversation_id=conversation_id,
            system_prompt=system_prompt
        )
        return jsonify(result)
    except Exception as e:
        # Surface upstream/document-search failures as a 500 with detail.
        return jsonify({'error': str(e)}), 500

@chatbot_bp.route('/conversation/<conversation_id>', methods=['GET'])
def get_history(conversation_id):
    """Return the stored message history for a conversation (404 if unknown)."""
    try:
        messages = chatbot_service.get_conversation_history(conversation_id)
    except ValueError as exc:
        return jsonify({'error': str(exc)}), 404
    return jsonify({'history': messages})

@chatbot_bp.route('/conversation/<conversation_id>', methods=['DELETE'])
def clear_history(conversation_id):
    """Delete a conversation's history; report whether anything was removed."""
    was_cleared = chatbot_service.clear_conversation(conversation_id)
    return jsonify({'cleared': was_cleared})

Prerequisites

Before using this module:
  1. Create a workspace for your chatbot documents
  2. Upload documents to the workspace using the Upload Session API
  3. Wait for embedding - documents must be fully embedded before querying
  4. Note the workspace_id - you’ll need it for the service configuration

Configuration

| Parameter | Description | Default |
| --- | --- | --- |
| `workspace_id` | ID of the workspace containing chatbot documents | Required |
| `model` | LLM model to use | `alfred-4.2` |
| `timeout` | API request timeout in seconds | 150 |

Best Practices

  1. Use system prompts to define the chatbot’s personality and behavior
  2. Store conversations persistently in production (database, Redis, etc.)
  3. Limit conversation history to avoid token limits (keep last 10-20 messages)
  4. Handle errors gracefully with user-friendly messages
  5. Keep documents updated in the workspace for accurate answers