All files / src/conversation index.ts

100% Statements 77/77
100% Branches 28/28
100% Functions 16/16
100% Lines 77/77

Press n or j to go to the next uncovered block, b, p or k for the previous block.

1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161                                                                                                                            78x   78x 78x 78x 78x       1x 26x 26x 26x 26x 26x   26x 23x 23x 23x 73x 73x 23x 23x   26x 12x     1x 12x 9x 9x 1x 12x   26x 26x 24x 24x   26x 7x 7x   26x 2x 2x   26x 2x 2x   26x 12x 12x 12x 12x 12x 12x   26x 5x 5x 5x 5x 5x   26x 2x 2x   26x 2x 2x   26x 6x 6x   26x 2x 2x 2x 2x   26x 1x 1x   26x 4x 4x 4x 26x   26x 26x  
/**
 * AgentKits — Conversation Manager
 *
 * Manage multi-turn conversations with history, token-aware truncation,
 * and system prompt management across providers.
 *
 * Usage:
 *   import { createConversation } from 'agentkits/conversation';
 */
 
// ── Types ──────────────────────────────────────────────────────────
 
/** A single message in a conversation history. */
export interface ConversationMessage {
  // Chat role; 'tool' carries a tool result back to the model.
  role: 'system' | 'user' | 'assistant' | 'tool';
  // Message text.
  content: string;
  // Tool name — set by addTool() for role 'tool' messages.
  name?: string;
  // Creation time in ms since epoch; set by the add* helpers.
  timestamp?: number;
  // Free-form caller-supplied data; stored as-is, never interpreted here.
  metadata?: Record<string, any>;
}
 
/** Options for createConversation. All fields are optional. */
export interface ConversationConfig {
  /** Maximum tokens for the conversation context. Default: 8192. */
  maxTokens?: number;
  /** System prompt; may also be set later via setSystem(). */
  systemPrompt?: string;
  /** Token counting function. Defaults to rough estimate (chars/4). */
  countTokens?: (text: string) => number;
  /**
   * Truncation strategy.
   * NOTE(review): this option is currently never read by createConversation —
   * truncation is always drop-oldest. Confirm before relying on it.
   */
  truncation?: 'sliding-window' | 'summarize-old' | 'keep-recent';
  /** Number of recent messages to always keep when truncating. Default: 4. */
  keepRecent?: number;
}
 
/** Public conversation-manager API returned by createConversation. */
export interface Conversation {
  /** Add a user message */
  addUser(content: string, metadata?: Record<string, any>): void;
  /** Add an assistant message */
  addAssistant(content: string, metadata?: Record<string, any>): void;
  /** Add a tool result message; `name` identifies the tool that produced it */
  addTool(content: string, name?: string): void;
  /** Set or update the system prompt */
  setSystem(prompt: string): void;
  /** Get messages formatted for API call, with truncation applied (system message first when a prompt is set) */
  getMessages(): ConversationMessage[];
  /** Get all messages without truncation (system message first when a prompt is set) */
  getAllMessages(): ConversationMessage[];
  /** Get estimated token count of current context (includes the system prompt) */
  getTokenCount(): number;
  /** Clear all messages (keeps system prompt) */
  clear(): void;
  /** Get message count (excluding system) */
  readonly length: number;
  /** Fork conversation (copies the message array; message objects and their metadata are shared with the original) */
  fork(): Conversation;
  /** Export as JSON (messages array is copied; message objects are shared) */
  toJSON(): { system?: string; messages: ConversationMessage[] };
  /** Import from JSON */
  loadJSON(data: { system?: string; messages: ConversationMessage[] }): void;
}
 
// ── Default token counter ──────────────────────────────────────────
 
// Characters estimated at ~2 chars/token: Han ideographs (incl. Extension A),
// Hiragana, Katakana, and Hangul syllables. The original range list omitted
// Hangul and Ext A, so Korean text fell into the ~4 chars/token bucket.
const CJK_CHAR = /[\u3400-\u4dbf\u4e00-\u9fff\u3040-\u309f\u30a0-\u30ff\uac00-\ud7af]/g;

/**
 * Rough token estimate used when no tokenizer is supplied:
 * ~4 chars per token for Latin-ish text, ~2 per token for CJK scripts.
 * Counts UTF-16 code units, so astral-plane characters count as two.
 *
 * @param text - Text to estimate.
 * @returns Estimated token count (>= 0, rounded up).
 */
function defaultCountTokens(text: string): number {
  const cjkChars = (text.match(CJK_CHAR) || []).length;
  const otherChars = text.length - cjkChars;
  return Math.ceil(otherChars / 4 + cjkChars / 2);
}
 
// ── Factory ────────────────────────────────────────────────────────
 
/**
 * Create a conversation manager.
 *
 * Holds an ordered message history plus an optional system prompt, and
 * produces a token-budgeted view of the history for API calls.
 *
 * @param config - See ConversationConfig. Defaults: maxTokens 8192,
 *   keepRecent 4, countTokens = rough chars/4 estimate.
 * @returns A Conversation instance.
 */
export function createConversation(config: ConversationConfig = {}): Conversation {
  const maxTokens = config.maxTokens ?? 8192;
  const countTokens = config.countTokens ?? defaultCountTokens;
  const keepRecent = config.keepRecent ?? 4;
  // NOTE(review): config.truncation is accepted but never read; behavior is
  // always drop-oldest (see truncate below).
  let systemPrompt = config.systemPrompt;
  let messages: ConversationMessage[] = [];

  // Estimated tokens for msgs plus the system prompt; +4 per message
  // approximates role/formatting overhead.
  function estimateTokens(msgs: ConversationMessage[]): number {
    let total = 0;
    if (systemPrompt) total += countTokens(systemPrompt) + 4; // role overhead
    for (const m of msgs) {
      total += countTokens(m.content) + 4; // role + formatting overhead
    }
    return total;
  }

  // Drop oldest messages until the estimate fits maxTokens, but never go
  // below keepRecent messages even if the budget is still exceeded.
  function truncate(msgs: ConversationMessage[]): ConversationMessage[] {
    if (estimateTokens(msgs) <= maxTokens) return msgs;
    const result = [...msgs];
    while (result.length > keepRecent && estimateTokens(result) > maxTokens) {
      result.shift();
    }
    return result;
  }

  const conv: Conversation = {
    addUser(content, metadata) {
      messages.push({ role: 'user', content, timestamp: Date.now(), metadata });
    },

    addAssistant(content, metadata) {
      messages.push({ role: 'assistant', content, timestamp: Date.now(), metadata });
    },

    addTool(content, name) {
      messages.push({ role: 'tool', content, name, timestamp: Date.now() });
    },

    setSystem(prompt) {
      systemPrompt = prompt;
    },

    getMessages() {
      // Truncated view for API calls; non-empty system prompt goes first.
      const truncated = truncate(messages);
      const result: ConversationMessage[] = [];
      if (systemPrompt) result.push({ role: 'system', content: systemPrompt });
      result.push(...truncated);
      return result;
    },

    getAllMessages() {
      const result: ConversationMessage[] = [];
      if (systemPrompt) result.push({ role: 'system', content: systemPrompt });
      result.push(...messages);
      return result;
    },

    getTokenCount() {
      return estimateTokens(messages);
    },

    clear() {
      // Drops the history only; the system prompt is intentionally kept.
      messages = [];
    },

    get length() {
      return messages.length;
    },

    fork() {
      // Shallow copy: the messages array is duplicated but the message
      // objects (and their metadata) are shared with the original.
      const forked = createConversation(config);
      forked.loadJSON({ system: systemPrompt, messages: [...messages] });
      return forked;
    },

    toJSON() {
      return { system: systemPrompt, messages: [...messages] };
    },

    loadJSON(data) {
      // Fix: '' is a valid (empty) system prompt and must not be discarded;
      // only `undefined` means "keep the current prompt". The old truthiness
      // check also made fork() resurrect config.systemPrompt after
      // setSystem('').
      if (data.system !== undefined) systemPrompt = data.system;
      messages = [...data.messages];
    },
  };

  return conv;
}