# NOTE(review): removed copy-paste artifacts from a web code viewer
# (CI status banner and file-size/line-count metadata) that were not part
# of the actual source file.
"""
|
|
Unit tests for llm_client.py
|
|
"""
|
|
import pytest
|
|
from unittest.mock import Mock, patch, MagicMock
|
|
from llm_client import (
|
|
OllamaClient,
|
|
OpenAIClient,
|
|
AnthropicClient,
|
|
create_llm_client
|
|
)
|
|
|
|
|
|
class TestOllamaClient:
    """Tests for the OllamaClient class."""

    def setup_method(self):
        """Create a fresh client before each test."""
        self.client = OllamaClient(
            base_url="http://localhost:11434",
            model="llama3.2",
            timeout=30,
        )

    def test_initialization(self):
        """Constructor stores base_url, model and timeout."""
        assert self.client.base_url == "http://localhost:11434"
        assert self.client.model == "llama3.2"
        assert self.client.timeout == 30

    def test_initialization_strips_trailing_slash(self):
        """A trailing slash on base_url is removed."""
        client = OllamaClient(base_url="http://localhost:11434/")
        assert client.base_url == "http://localhost:11434"

    @patch('llm_client.requests.post')
    def test_chat_without_context(self, mock_post):
        """chat() returns the message content from the API response."""
        reply = Mock()
        reply.json.return_value = {
            "message": {"content": "Hello! How can I help?"}
        }
        mock_post.return_value = reply

        result = self.client.chat([{"role": "user", "content": "Hi"}])

        assert result == "Hello! How can I help?"
        mock_post.assert_called_once()

    @patch('llm_client.requests.post')
    def test_chat_with_context(self, mock_post):
        """chat() prepends a system message carrying the context."""
        reply = Mock()
        reply.json.return_value = {
            "message": {"content": "Based on the document..."}
        }
        mock_post.return_value = reply

        result = self.client.chat(
            [{"role": "user", "content": "What does it say?"}],
            context="This is a test document.",
        )

        assert result == "Based on the document..."

        # The first message in the outgoing payload should be a system
        # message that embeds the supplied context.
        sent_messages = mock_post.call_args[1]['json']['messages']
        assert sent_messages[0]['role'] == 'system'
        assert 'test document' in sent_messages[0]['content']

    @patch('llm_client.requests.post')
    def test_chat_handles_api_error(self, mock_post):
        """A requests-level failure surfaces as an Ollama API error."""
        import requests
        mock_post.side_effect = requests.exceptions.RequestException("Connection failed")

        with pytest.raises(Exception, match="Ollama API error"):
            self.client.chat([{"role": "user", "content": "Hi"}])

    @patch('llm_client.requests.get')
    def test_is_available_success(self, mock_get):
        """is_available() is True when the service answers with HTTP 200."""
        ok = Mock()
        ok.status_code = 200
        mock_get.return_value = ok

        assert self.client.is_available() is True

    @patch('llm_client.requests.get')
    def test_is_available_failure(self, mock_get):
        """is_available() is False when the probe request raises."""
        mock_get.side_effect = Exception("Connection refused")

        assert self.client.is_available() is False


class TestOpenAIClient:
    """Tests for the OpenAIClient class."""

    def setup_method(self):
        """Create a fresh client before each test."""
        self.client = OpenAIClient(
            api_key="sk-test123",
            model="gpt-4",
            timeout=30,
        )

    def test_initialization(self):
        """Constructor stores api_key, model and timeout."""
        assert self.client.api_key == "sk-test123"
        assert self.client.model == "gpt-4"
        assert self.client.timeout == 30

    @patch('llm_client.requests.post')
    def test_chat_without_context(self, mock_post):
        """chat() returns the first choice's message content."""
        reply = Mock()
        reply.json.return_value = {
            "choices": [
                {"message": {"content": "Hello from GPT-4!"}}
            ]
        }
        mock_post.return_value = reply

        result = self.client.chat([{"role": "user", "content": "Hi"}])

        assert result == "Hello from GPT-4!"

    @patch('llm_client.requests.post')
    def test_chat_with_context(self, mock_post):
        """chat() succeeds when a context string is supplied."""
        reply = Mock()
        reply.json.return_value = {
            "choices": [
                {"message": {"content": "Based on the context..."}}
            ]
        }
        mock_post.return_value = reply

        result = self.client.chat(
            [{"role": "user", "content": "Summarize"}],
            context="Test document content",
        )

        assert result == "Based on the context..."

    @patch('llm_client.requests.get')
    def test_is_available_success(self, mock_get):
        """is_available() is True when the API answers with HTTP 200."""
        ok = Mock()
        ok.status_code = 200
        mock_get.return_value = ok

        assert self.client.is_available() is True

    @patch('llm_client.requests.get')
    def test_is_available_failure(self, mock_get):
        """is_available() is False when the probe request raises."""
        mock_get.side_effect = Exception("Unauthorized")

        assert self.client.is_available() is False


class TestAnthropicClient:
    """Tests for the AnthropicClient class."""

    def setup_method(self):
        """Create a fresh client before each test."""
        self.client = AnthropicClient(
            api_key="sk-ant-test123",
            model="claude-3-5-sonnet-20241022",
            timeout=30,
        )

    def test_initialization(self):
        """Constructor stores api_key, model and timeout."""
        assert self.client.api_key == "sk-ant-test123"
        assert self.client.model == "claude-3-5-sonnet-20241022"
        assert self.client.timeout == 30

    @patch('llm_client.requests.post')
    def test_chat_without_context(self, mock_post):
        """chat() returns the text of the first content block."""
        reply = Mock()
        reply.json.return_value = {
            "content": [{"text": "Hello from Claude!"}]
        }
        mock_post.return_value = reply

        result = self.client.chat([{"role": "user", "content": "Hi"}])

        assert result == "Hello from Claude!"

    @patch('llm_client.requests.post')
    def test_chat_with_context(self, mock_post):
        """chat() passes the context via the top-level 'system' field."""
        reply = Mock()
        reply.json.return_value = {
            "content": [{"text": "Based on the document..."}]
        }
        mock_post.return_value = reply

        result = self.client.chat(
            [{"role": "user", "content": "Summarize"}],
            context="Test document",
        )

        assert result == "Based on the document..."

        # The outgoing payload should carry the context as a system prompt.
        sent_payload = mock_post.call_args[1]['json']
        assert 'system' in sent_payload
        assert 'Test document' in sent_payload['system']

    def test_is_available_with_api_key(self):
        """is_available() is True when an API key is present."""
        assert self.client.is_available() is True

    def test_is_available_without_api_key(self):
        """is_available() is False when the API key is empty."""
        client = AnthropicClient(api_key="")
        assert client.is_available() is False


class TestCreateLLMClient:
    """Tests for the create_llm_client factory function."""

    def test_create_ollama_client(self):
        """Factory builds an OllamaClient for the 'ollama' provider."""
        client = create_llm_client("ollama", model="llama3.2")
        assert isinstance(client, OllamaClient)
        assert client.model == "llama3.2"

    def test_create_openai_client(self):
        """Factory builds an OpenAIClient for the 'openai' provider."""
        client = create_llm_client("openai", api_key="sk-test", model="gpt-4")
        assert isinstance(client, OpenAIClient)
        assert client.model == "gpt-4"

    def test_create_openai_client_without_key(self):
        """Requesting an OpenAI client without a key raises ValueError."""
        with pytest.raises(ValueError, match="OpenAI API key required"):
            create_llm_client("openai")

    def test_create_anthropic_client(self):
        """Factory builds an AnthropicClient for the 'anthropic' provider."""
        client = create_llm_client("anthropic", api_key="sk-ant-test")
        assert isinstance(client, AnthropicClient)

    def test_create_anthropic_client_without_key(self):
        """Requesting an Anthropic client without a key raises ValueError."""
        with pytest.raises(ValueError, match="Anthropic API key required"):
            create_llm_client("anthropic")

    def test_create_unknown_provider(self):
        """An unrecognized provider name raises ValueError."""
        with pytest.raises(ValueError, match="Unknown LLM provider"):
            create_llm_client("unknown_provider")

    @patch.dict('os.environ', {'OLLAMA_URL': 'http://custom:11434', 'OLLAMA_MODEL': 'mistral'})
    def test_create_ollama_with_env_vars(self):
        """Ollama URL and model fall back to environment variables."""
        client = create_llm_client("ollama")
        assert isinstance(client, OllamaClient)
        assert client.base_url == "http://custom:11434"
        assert client.model == "mistral"