Python: #6501 Increase anthropic chat completion test coverage (#8388)
### Motivation and Context
Increase Anthropic Chat Completion code coverage to 100% (#6501)

### Description
Match the mocked Anthropic API responses in the unit tests to realistic response and streaming-event payloads.
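
For reference, here is a minimal sketch of the non-streaming Messages API response shape the updated mocks imitate; the message id is a placeholder, and the text and token counts are illustrative values rather than output from a real call:

```python
# Illustrative only: mirrors the fields the fixture sets on the mocked response.
from anthropic.types import Message, TextBlock, Usage

example_response = Message(
    id="msg_01ABC123",  # placeholder id, not a real message id
    content=[TextBlock(text="Hello! It's nice to meet you.", type="text")],
    model="claude-3-opus-20240229",
    role="assistant",
    stop_reason="end_turn",
    stop_sequence=None,
    type="message",
    usage=Usage(input_tokens=114, output_tokens=75),
)
```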

### Contribution Checklist
- [x] The code builds clean without any errors or warnings
- [x] The PR follows the [SK Contribution
Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md)
and the [pre-submission formatting
script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts)
raises no violations
- [x] All unit tests pass, and I have added new tests where possible
- [x] I didn't break anyone 😄
andrewldesousa committed Aug 28, 2024
1 parent 06f3471 commit 53dcf39
Showing 1 changed file with 116 additions and 19 deletions.
@@ -3,6 +3,21 @@

import pytest
from anthropic import AsyncAnthropic
from anthropic.lib.streaming import TextEvent
from anthropic.types import (
ContentBlockStopEvent,
Message,
MessageDeltaUsage,
MessageStopEvent,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawMessageDeltaEvent,
RawMessageStartEvent,
TextBlock,
TextDelta,
Usage,
)
from anthropic.types.raw_message_delta_event import Delta

from semantic_kernel.connectors.ai.anthropic.prompt_execution_settings.anthropic_prompt_execution_settings import (
AnthropicChatPromptExecutionSettings,
@@ -12,6 +27,7 @@
from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import (
OpenAIChatPromptExecutionSettings,
)
from semantic_kernel.contents.chat_history import ChatHistory
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.exceptions import ServiceInitializationError, ServiceResponseException
from semantic_kernel.functions.kernel_arguments import KernelArguments
@@ -26,43 +42,120 @@ def mock_settings() -> AnthropicChatPromptExecutionSettings:
@pytest.fixture
def mock_anthropic_client_completion() -> AsyncAnthropic:
client = MagicMock(spec=AsyncAnthropic)

chat_completion_response = AsyncMock()

chat_completion_response.content = [TextBlock(text="Hello! It's nice to meet you.", type="text")]
chat_completion_response.id = "test_id"
chat_completion_response.model = "claude-3-opus-20240229"
chat_completion_response.role = "assistant"
chat_completion_response.stop_reason = "end_turn"
chat_completion_response.stop_sequence = None
chat_completion_response.type = "message"
chat_completion_response.usage = Usage(input_tokens=114, output_tokens=75)

# Create a MagicMock for the messages attribute
messages_mock = MagicMock()
messages_mock.create = AsyncMock(return_value=chat_completion_response)

# Assign the messages_mock to the client.messages attribute
client.messages = messages_mock

return client


@pytest.fixture
def mock_anthropic_client_completion_stream() -> AsyncAnthropic:
client = MagicMock(spec=AsyncAnthropic)

# Create MagicMock instances for each event with the spec set to the appropriate class
mock_raw_message_start_event = MagicMock(spec=RawMessageStartEvent)
mock_raw_message_start_event.message = MagicMock(spec=Message)
mock_raw_message_start_event.message.id = "test_message_id"
mock_raw_message_start_event.message.content = []
mock_raw_message_start_event.message.model = "claude-3-opus-20240229"
mock_raw_message_start_event.message.role = "assistant"
mock_raw_message_start_event.message.stop_reason = None
mock_raw_message_start_event.message.stop_sequence = None
mock_raw_message_start_event.message.type = "message"
mock_raw_message_start_event.message.usage = MagicMock(spec=Usage)
mock_raw_message_start_event.message.usage.input_tokens = 41
mock_raw_message_start_event.message.usage.output_tokens = 3
mock_raw_message_start_event.type = "message_start"

mock_raw_content_block_start_event = MagicMock(spec=RawContentBlockStartEvent)
mock_raw_content_block_start_event.content_block = MagicMock(spec=TextBlock)
mock_raw_content_block_start_event.content_block.text = ""
mock_raw_content_block_start_event.content_block.type = "text"
mock_raw_content_block_start_event.index = 0
mock_raw_content_block_start_event.type = "content_block_start"

mock_raw_content_block_delta_event = MagicMock(spec=RawContentBlockDeltaEvent)
mock_raw_content_block_delta_event.delta = MagicMock(spec=TextDelta)
mock_raw_content_block_delta_event.delta.text = "Hello! It"
mock_raw_content_block_delta_event.delta.type = "text_delta"
mock_raw_content_block_delta_event.index = 0
mock_raw_content_block_delta_event.type = "content_block_delta"

mock_text_event = MagicMock(spec=TextEvent)
mock_text_event.type = "text"
mock_text_event.text = "Hello! It"
mock_text_event.snapshot = "Hello! It"

mock_content_block_stop_event = MagicMock(spec=ContentBlockStopEvent)
mock_content_block_stop_event.index = 0
mock_content_block_stop_event.type = "content_block_stop"
mock_content_block_stop_event.content_block = MagicMock(spec=TextBlock)
mock_content_block_stop_event.content_block.text = "Hello! It's nice to meet you."
mock_content_block_stop_event.content_block.type = "text"

mock_raw_message_delta_event = MagicMock(spec=RawMessageDeltaEvent)
mock_raw_message_delta_event.delta = MagicMock(spec=Delta)
mock_raw_message_delta_event.delta.stop_reason = "end_turn"
mock_raw_message_delta_event.delta.stop_sequence = None
mock_raw_message_delta_event.type = "message_delta"
mock_raw_message_delta_event.usage = MagicMock(spec=MessageDeltaUsage)
mock_raw_message_delta_event.usage.output_tokens = 84

mock_message_stop_event = MagicMock(spec=MessageStopEvent)
mock_message_stop_event.type = "message_stop"
mock_message_stop_event.message = MagicMock(spec=Message)
mock_message_stop_event.message.id = "test_message_stop_id"
mock_message_stop_event.message.content = [MagicMock(spec=TextBlock)]
mock_message_stop_event.message.content[0].text = "Hello! It's nice to meet you."
mock_message_stop_event.message.content[0].type = "text"
mock_message_stop_event.message.model = "claude-3-opus-20240229"
mock_message_stop_event.message.role = "assistant"
mock_message_stop_event.message.stop_reason = "end_turn"
mock_message_stop_event.message.stop_sequence = None
mock_message_stop_event.message.type = "message"
mock_message_stop_event.message.usage = MagicMock(spec=Usage)
mock_message_stop_event.message.usage.input_tokens = 41
mock_message_stop_event.message.usage.output_tokens = 84

# Combine all mock events into a list
stream_events = [
mock_raw_message_start_event,
mock_raw_content_block_start_event,
mock_raw_content_block_delta_event,
mock_text_event,
mock_content_block_stop_event,
mock_raw_message_delta_event,
mock_message_stop_event,
]

async def async_generator():
for event in stream_events:
yield event

# Create an AsyncMock for the stream
stream_mock = AsyncMock()
stream_mock.__aenter__.return_value = async_generator()

# Create a MagicMock for the messages attribute
messages_mock = MagicMock()

messages_mock.stream.return_value = stream_mock

client.messages = messages_mock

return client


@@ -72,7 +165,10 @@ async def test_complete_chat_contents(
mock_settings: AnthropicChatPromptExecutionSettings,
mock_anthropic_client_completion: AsyncAnthropic,
):
chat_history = ChatHistory()
chat_history.add_user_message("test_user_message")
chat_history.add_assistant_message("test_assistant_message")

arguments = KernelArguments()
chat_completion_base = AnthropicChatCompletion(
ai_model_id="test_model_id", service_id="test", api_key="", async_client=mock_anthropic_client_completion
@@ -81,6 +177,7 @@
content: list[ChatMessageContent] = await chat_completion_base.get_chat_message_contents(
chat_history=chat_history, settings=mock_settings, kernel=kernel, arguments=arguments
)

assert content is not None


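The streaming fixture above stands in for the SDK's `client.messages.stream(...)` async context manager; below is a rough sketch of the call pattern it simulates. The model name and prompt are illustrative, it is not part of the test suite, and a real run requires `ANTHROPIC_API_KEY` to be set.

```python
# Rough sketch of the streaming call pattern the fixture's __aenter__ /
# async-generator setup simulates; values here are illustrative only.
import asyncio

from anthropic import AsyncAnthropic


async def main() -> None:
    client = AsyncAnthropic()  # reads ANTHROPIC_API_KEY from the environment
    async with client.messages.stream(
        model="claude-3-opus-20240229",
        max_tokens=256,
        messages=[{"role": "user", "content": "Hello"}],
    ) as stream:
        # Iterating the stream yields the same event types the fixture mocks:
        # message_start, content_block_start, content_block_delta, text, ...
        async for event in stream:
            if event.type == "text":
                print(event.text, end="", flush=True)


asyncio.run(main())
```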
