Customer Support Bot with Memory
Build a customer support bot that remembers customer history, past interactions, and preferences. This enables personalized, context-aware support without customers repeating themselves.

Architecture

Key concepts:
  • Per-customer memory: Each customer’s interactions are stored under their user_id
  • Session isolation: Each support ticket uses a unique session_id to keep conversations separate
  • Cross-ticket search: Find relevant past issues across all of a customer’s history

Setup: Support Bot Configuration

Install the EverOS SDK:
pip install everos
Initialize the client and define the bot:
from everos import EverOS
import time

# Shared EverOS client; all code below uses the v1 memories API.
client = EverOS()
memories = client.v1.memories

class SupportBot:
    """Minimal support bot that records ticket conversations in EverOS."""

    def __init__(self, bot_name: str = "Support Bot"):
        self.bot_name = bot_name

    def create_ticket(self, customer_id: str, ticket_id: str, subject: str):
        """Open a ticket session and record its creation as the first message."""
        session_id = f"ticket_{ticket_id}"

        # The opening message marks the start of this ticket's history.
        opening = {
            "role": "assistant",
            "timestamp": int(time.time() * 1000),
            "content": f"New support ticket created. Subject: {subject}",
        }
        memories.add(
            user_id=customer_id,
            session_id=session_id,
            messages=[opening],
        )
        return session_id

    def store_message(self, customer_id: str, session_id: str, role: str, content: str):
        """Append one message to the given ticket session."""
        record = {
            "role": role,
            "timestamp": int(time.time() * 1000),
            "content": content,
        }
        memories.add(
            user_id=customer_id,
            session_id=session_id,
            messages=[record],
        )

Customer Message Handling

When a customer sends a message, store it and retrieve relevant context. (The following methods belong to the SupportBot class defined above; they are shown unindented here for readability.)
def handle_customer_message(
    self,
    customer_id: str,
    session_id: str,
    message: str,
) -> dict:
    """Record an incoming customer message, then return retrieval context.

    The message is persisted first so it becomes part of the ticket
    history; the returned dict is whatever _gather_context assembles.
    """
    # Store the customer's turn before doing any retrieval.
    self.store_message(customer_id, session_id, "user", message)

    # Hand back profile/history context relevant to this message.
    return self._gather_context(customer_id, message)

def _gather_context(self, customer_id: str, query: str) -> dict:
    """Collect profile and episodic context for this customer from EverOS."""

    def run_search(kinds: list) -> list:
        # Vector search scoped to this customer; empty list when no data.
        resp = memories.search(
            filters={"user_id": customer_id},
            query=query,
            method="vector",
            memory_types=kinds,
            top_k=5,
        )
        return resp.data.episodes if resp.data else []

    return {
        "customer_profile": run_search(["profile"]),
        "past_history": run_search(["episodic_memory"]),
    }

Generate Context-Aware Response

Use the retrieved context to generate personalized responses. (This method also belongs to the SupportBot class; it is shown unindented for readability.)
def generate_response(self, customer_message: str, context: dict) -> str:
    """Build an LLM prompt from memory context and return the model's reply."""

    def as_bullets(episodes, fallback: str) -> str:
        # One "- summary" line per episode; fallback text when empty.
        joined = "\n".join(f"- {ep.summary}" for ep in episodes)
        return joined or fallback

    profile_text = as_bullets(
        context.get("customer_profile", []),
        "No profile information available.",
    )
    history_text = as_bullets(
        context.get("past_history", []),
        "No relevant past interactions.",
    )

    prompt = f"""You are a helpful customer support agent. Use the following context to provide personalized support.

CUSTOMER PROFILE:
{profile_text}

RELEVANT PAST INTERACTIONS:
{history_text}

CUSTOMER MESSAGE:
{customer_message}

Instructions:
- Address the customer by name if known
- Reference past issues if relevant (e.g., "I see you had a similar issue before...")
- Use customer preferences (e.g., communication style, technical level)
- Don't repeat information the customer already provided
- Be concise but thorough

Response:"""

    # Replace with your LLM call
    # response = openai.chat.completions.create(...)
    return "[LLM response here]"

Complete Support Bot Implementation

from everos import EverOS
import time

# Shared EverOS client used by CustomerSupportBot below.
client = EverOS()
memories = client.v1.memories

class CustomerSupportBot:
    """Support bot that stores per-ticket conversations in EverOS and
    retrieves per-customer context for personalized responses.

    Each ticket gets its own session_id ("ticket_<ticket_id>") so
    conversations stay isolated while all memories accrue under the
    customer's user_id.
    """

    def __init__(self):
        self.bot_name = "Support Bot"

    def create_ticket(self, customer_id: str, ticket_id: str, subject: str) -> str:
        """Create a new support ticket.

        Records an opening assistant message in a ticket-scoped session
        and returns the session_id used for all follow-up messages.
        """
        session_id = f"ticket_{ticket_id}"

        memories.add(
            user_id=customer_id,
            session_id=session_id,
            messages=[
                {
                    "role": "assistant",
                    "timestamp": int(time.time() * 1000),  # epoch millis
                    "content": f"Ticket opened: {subject}",
                }
            ],
        )
        return session_id

    def customer_message(self, customer_id: str, ticket_id: str, message: str) -> str:
        """Handle a customer message and return the bot's reply.

        Stores the customer turn, gathers memory context, generates a
        response, and stores the bot turn before returning it.
        """
        session_id = f"ticket_{ticket_id}"

        # Store customer message first so it is part of the ticket history.
        self._store(customer_id, session_id, "user", message)

        # Gather profile + episodic context relevant to this message.
        context = self._get_context(customer_id, message)

        # Generate response (implement with your LLM).
        response = self._generate_response(message, context)

        # Store bot response so the full exchange is remembered.
        self._store(customer_id, session_id, "assistant", response)

        return response

    def close_ticket(self, customer_id: str, ticket_id: str):
        """Flush memories when ticket is closed."""
        memories.flush(
            user_id=customer_id,
            session_id=f"ticket_{ticket_id}",
        )

    def escalate_to_agent(self, customer_id: str, ticket_id: str, agent_name: str) -> dict:
        """Generate a handoff context dict for a human agent.

        Pulls up to 10 episodic memories about the customer's issue and
        resolution attempts so the agent does not start from scratch.
        """
        resp = memories.search(
            filters={"user_id": customer_id},
            query="issue summary resolution attempts",
            method="vector",
            memory_types=["episodic_memory"],
            top_k=10,
        )
        episodes = resp.data.episodes if resp.data else []

        return {
            "ticket_id": ticket_id,
            "summary": [ep.summary for ep in episodes],
            "handoff_note": f"Escalated to {agent_name}. See memory context for full history.",
        }

    def _store(self, customer_id: str, session_id: str, role: str, content: str):
        """Store a single message in the given session."""
        memories.add(
            user_id=customer_id,
            session_id=session_id,
            messages=[
                {
                    "role": role,
                    "timestamp": int(time.time() * 1000),
                    "content": content,
                }
            ],
        )

    def _search(self, customer_id: str, query: str, top_k: int = 5, memory_types=None) -> list:
        """Search EverOS for relevant memories.

        memory_types defaults to both episodic and profile memories when
        None (or empty) is passed.
        """
        resp = memories.search(
            filters={"user_id": customer_id},
            query=query,
            method="vector",
            memory_types=memory_types or ["episodic_memory", "profile"],
            top_k=top_k,
        )
        return resp.data.episodes if resp.data else []

    def _get_context(self, customer_id: str, query: str) -> dict:
        """Bundle profile and episodic search results for response generation."""
        return {
            "profile": self._search(customer_id, query, memory_types=["profile"]),
            "history": self._search(customer_id, query, memory_types=["episodic_memory"]),
        }

    def _generate_response(self, message: str, context: dict) -> str:
        """Placeholder response generator — replace with a real LLM call."""
        # Guard against episodes whose summary is None (same `or ""` guard
        # the PII-filtering example in this guide uses).
        profile_summary = "; ".join((ep.summary or "")[:50] for ep in context["profile"])
        return f"[Response using context: {profile_summary}...]"


# Usage Example
# Usage Example
bot = CustomerSupportBot()

# Customer opens a ticket; create_ticket returns the ticket's session_id
session = bot.create_ticket(
    customer_id="customer_john",
    ticket_id="T-2024-001",
    subject="Unable to reset password",
)

# Customer sends messages; each call stores both turns and returns the reply
response1 = bot.customer_message(
    customer_id="customer_john",
    ticket_id="T-2024-001",
    message="I've been trying to reset my password but the email never arrives.",
)
print(f"Bot: {response1}")

# Follow-up in the same ticket reuses the same session internally
response2 = bot.customer_message(
    customer_id="customer_john",
    ticket_id="T-2024-001",
    message="I checked spam folder too. My email is john@example.com",
)
print(f"Bot: {response2}")

# Close ticket when resolved
bot.close_ticket("customer_john", "T-2024-001")

# Escalate if needed — returns a structured handoff dict for the human agent
handoff = bot.escalate_to_agent("customer_john", "T-2024-001", "Sarah")
print(f"Handoff context: {handoff}")

Cross-Ticket Intelligence

Search across all of a customer’s history to find patterns.
from everos import EverOS

# Fresh client for the cross-ticket examples below.
client = EverOS()
memories = client.v1.memories

def find_recurring_issues(customer_id: str) -> list:
    """Return up to 20 episodic memories matching common problem terms.

    Searches across ALL of the customer's sessions (no session filter),
    which is what surfaces patterns spanning multiple tickets.
    """
    response = memories.search(
        filters={"user_id": customer_id},
        query="issue problem error unable",
        method="vector",
        memory_types=["episodic_memory"],
        top_k=20,
    )
    if not response.data:
        return []
    return response.data.episodes

# Find if customer has had similar issues before
# (up to 20 episodic memories ranked by vector similarity)
history = find_recurring_issues("customer_john")
print(f"Found {len(history)} relevant past interactions")

Best Practices

Always use a unique session_id per ticket to keep conversations separate while building a unified customer profile.
# Good: Ticket-based session IDs — each ticket's conversation stays isolated
session_id = f"ticket_{ticket_id}"
session_id = f"support_{customer_id}_{ticket_number}"

# Bad: No session isolation — every ticket lands in one conversation
memories.add(user_id=customer_id, messages=[...])  # All tickets mixed
Be mindful of PII when generating LLM prompts.
# Filter sensitive profile memories before LLM
# Filter sensitive profile memories before LLM
# (ep.summary may be None, hence the `or ""` guard before .lower())
safe_memories = [
    ep for ep in profile_memories
    if not any(term in (ep.summary or "").lower()
              for term in ["ssn", "credit card", "password"])
]
Provide structured context for human agents.
# Example structured handoff payload for a human agent
handoff = {
    "ticket_id": ticket_id,
    "customer_summary": "3-year customer, premium tier",
    "issue_summary": "Password reset email not arriving",
    "attempted_solutions": ["Checked spam", "Verified email"],
    "customer_sentiment": "Frustrated but polite",
    "relevant_history": ["Similar issue 6 months ago - spam filter"],
}

Next Steps

AI Tutor

Apply similar patterns to educational contexts

Agentic Retrieval

Use LLM-guided search for complex support queries