# Core components: FastAPI server with health endpoints; AI client (Anthropic
# Claude integration); context manager (loads company knowledge); chat API
# (non-streaming and streaming); requirements and environment setup.
# Ready to run: python backend/main.py
"""
AI Client - Anthropic Claude Integration
"""

import logging
import os
from typing import Dict, List, Optional

import anthropic

# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)

class AIClient:
    """Client for interacting with the Anthropic Claude API.

    Model and sampling configuration are read from environment variables
    at construction time: AI_MODEL, AI_MAX_TOKENS, AI_TEMPERATURE.
    """

    def __init__(self, api_key: str):
        """Initialize the async Anthropic client.

        Args:
            api_key: Anthropic API key.
        """
        # AsyncAnthropic (not the sync Anthropic) so the async chat() and
        # chat_stream() methods below do not block the event loop while a
        # request is in flight.
        self.client = anthropic.AsyncAnthropic(api_key=api_key)
        # NOTE(review): confirm this default model ID is a currently valid
        # Anthropic model string; it is only used when AI_MODEL is unset.
        self.model = os.getenv("AI_MODEL", "claude-sonnet-4-5-20250514")
        self.max_tokens = int(os.getenv("AI_MAX_TOKENS", "4096"))
        self.temperature = float(os.getenv("AI_TEMPERATURE", "0.7"))
        # Lazy %-style args keep string formatting out of disabled log levels.
        logger.info("AIClient initialized with model: %s", self.model)

    def _build_request(
        self,
        messages: List[Dict[str, str]],
        system_prompt: Optional[str],
        max_tokens: Optional[int],
    ) -> Dict:
        """Assemble the keyword arguments shared by chat() and chat_stream().

        Args:
            messages: List of message dicts with 'role' and 'content'.
            system_prompt: Optional system prompt.
            max_tokens: Optional max tokens override; falls back to the
                configured default when falsy.

        Returns:
            Keyword-argument dict for the Anthropic messages API.
        """
        kwargs: Dict = {
            "model": self.model,
            "max_tokens": max_tokens or self.max_tokens,
            "temperature": self.temperature,
            "messages": messages,
        }
        # The API rejects an explicit None/empty system field, so only set
        # it when a prompt was actually provided.
        if system_prompt:
            kwargs["system"] = system_prompt
        return kwargs

    async def chat(
        self,
        messages: List[Dict[str, str]],
        system_prompt: Optional[str] = None,
        max_tokens: Optional[int] = None
    ) -> str:
        """
        Send chat messages to Claude and get response

        Args:
            messages: List of message dicts with 'role' and 'content'
            system_prompt: Optional system prompt
            max_tokens: Optional max tokens override

        Returns:
            Response text from Claude

        Raises:
            Exception: Re-raises any error from the Anthropic API after
                logging it.
        """
        try:
            response = await self.client.messages.create(
                **self._build_request(messages, system_prompt, max_tokens)
            )

            # Extract the text of the first content block, if any.
            if response.content and len(response.content) > 0:
                return response.content[0].text

            # Sentinel kept for backward compatibility: callers expect a
            # string, never None, even when the model returns no content.
            return "No response generated"

        except Exception as e:
            logger.error("Error calling Claude API: %s", e)
            raise

    async def chat_stream(
        self,
        messages: List[Dict[str, str]],
        system_prompt: Optional[str] = None,
        max_tokens: Optional[int] = None
    ):
        """
        Stream chat response from Claude

        Args:
            messages: List of message dicts
            system_prompt: Optional system prompt
            max_tokens: Optional max tokens override

        Yields:
            Text chunks from Claude

        Raises:
            Exception: Re-raises any error from the Anthropic API after
                logging it.
        """
        try:
            # async with / async for: the AsyncAnthropic streaming helper
            # yields chunks without blocking the event loop between them.
            async with self.client.messages.stream(
                **self._build_request(messages, system_prompt, max_tokens)
            ) as stream:
                async for text in stream.text_stream:
                    yield text

        except Exception as e:
            logger.error("Error streaming from Claude API: %s", e)
            raise