Commit b8e3861: Resolve merge from main

crickman committed Sep 19, 2024
2 parents: 6e76d5a + 6bba2b6
Showing 6 changed files with 24 additions and 20 deletions.
4 changes: 0 additions & 4 deletions .github/workflows/python-integration-tests.yml
@@ -129,8 +129,6 @@ jobs:
   AZURE_OPENAI_TEXT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_TEXT_DEPLOYMENT_NAME }}
   AZURE_OPENAI_API_VERSION: ${{ vars.AZURE_OPENAI_API_VERSION }}
   AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
-  AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
-  AZURE_OPENAI_TOKEN_ENDPOINT: ${{ vars.AZURE_OPENAI_TOKEN_ENDPOINT }}
   BING_API_KEY: ${{ secrets.BING_API_KEY }}
   OPENAI_CHAT_MODEL_ID: ${{ vars.OPENAI_CHAT_MODEL_ID }}
   OPENAI_TEXT_MODEL_ID: ${{ vars.OPENAI_TEXT_MODEL_ID }}
@@ -258,8 +256,6 @@ jobs:
   AZURE_OPENAI_TEXT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_TEXT_DEPLOYMENT_NAME }}
   AZURE_OPENAI_API_VERSION: ${{ vars.AZURE_OPENAI_API_VERSION }}
   AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
-  AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
-  AZURE_OPENAI_TOKEN_ENDPOINT: ${{ vars.AZURE_OPENAI_TOKEN_ENDPOINT }}
   BING_API_KEY: ${{ secrets.BING_API_KEY }}
   OPENAI_CHAT_MODEL_ID: ${{ vars.OPENAI_CHAT_MODEL_ID }}
   OPENAI_TEXT_MODEL_ID: ${{ vars.OPENAI_TEXT_MODEL_ID }}
@@ -83,7 +83,7 @@ private async Task InvokeAgentAsync(OpenAIAssistantAgent agent, string threadId,
     ChatHistory history = [];

     bool isFirst = false;
-    await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(threadId, history))
+    await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(threadId, messages: history))
     {
         if (string.IsNullOrEmpty(response.Content))
         {
8 changes: 4 additions & 4 deletions dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
@@ -349,7 +349,7 @@ public static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamin
         OpenAIAssistantAgent agent,
         AssistantClient client,
         string threadId,
-        IList<ChatMessageContent> messages,
+        IList<ChatMessageContent>? messages,
         OpenAIAssistantInvocationOptions? invocationOptions,
         ILogger logger,
         Kernel kernel,
@@ -416,7 +416,7 @@ public static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamin
         if (functionCalls.Length > 0)
         {
             // Emit function-call content
-            messages.Add(GenerateFunctionCallContent(agent.GetName(), functionCalls));
+            messages?.Add(GenerateFunctionCallContent(agent.GetName(), functionCalls));

             // Invoke functions for each tool-step
             IEnumerable<Task<FunctionResultContent>> functionResultTasks = ExecuteFunctionSteps(agent, functionCalls, cancellationToken);
@@ -428,7 +428,7 @@ public static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamin
             ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
             asyncUpdates = client.SubmitToolOutputsToRunStreamingAsync(run, toolOutputs);

-            messages.Add(GenerateFunctionResultContent(agent.GetName(), functionResults));
+            messages?.Add(GenerateFunctionResultContent(agent.GetName(), functionResults));
         }
     }

@@ -443,7 +443,7 @@ public static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamin
         if (message != null)
         {
             ChatMessageContent content = GenerateMessageContent(agent.GetName(), message);
-            messages.Add(content);
+            messages?.Add(content);
         }
     }

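Note: taken together, these hunks make the messages receiver optional. Every write becomes null-conditional, so a caller that only consumes the stream can skip allocating a history. A minimal, self-contained sketch of the pattern (hypothetical names, not the agent code itself):

    using System;
    using System.Collections.Generic;

    public static class OptionalReceiverSketch
    {
        // Hypothetical stand-in for the streaming loop: yields chunks and, when a
        // receiver was supplied, also records the completed message.
        public static IEnumerable<string> Stream(IList<string>? messages = null)
        {
            string completed = string.Empty;
            foreach (string chunk in new[] { "Hello", ", ", "world" })
            {
                completed += chunk;
                yield return chunk;
            }
            messages?.Add(completed); // no-op when the caller passed no receiver
        }
    }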
10 changes: 5 additions & 5 deletions dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -358,41 +358,41 @@ public async IAsyncEnumerable<ChatMessageContent> InvokeAsync(
     /// Invoke the assistant on the specified thread with streaming response.
     /// </summary>
     /// <param name="threadId">The thread identifier</param>
-    /// <param name="messages">The receiver for the completed messages generated</param>
     /// <param name="arguments">Optional arguments to pass to the agent's invocation, including any <see cref="PromptExecutionSettings"/>.</param>
     /// <param name="kernel">The <see cref="Kernel"/> containing services, plugins, and other state for use by the agent.</param>
+    /// <param name="messages">Optional receiver of the completed messages generated</param>
     /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
     /// <returns>Asynchronous enumeration of messages.</returns>
     /// <remarks>
     /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
     /// </remarks>
     public IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamingAsync(
         string threadId,
-        ChatHistory messages,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
+        ChatHistory? messages = null,
         CancellationToken cancellationToken = default)
-        => this.InvokeStreamingAsync(threadId, messages, options: null, arguments, kernel, cancellationToken);
+        => this.InvokeStreamingAsync(threadId, options: null, arguments, kernel, messages, cancellationToken);

     /// <summary>
     /// Invoke the assistant on the specified thread with streaming response.
     /// </summary>
     /// <param name="threadId">The thread identifier</param>
-    /// <param name="messages">The receiver for the completed messages generated</param>
     /// <param name="options">Optional invocation options</param>
     /// <param name="arguments">Optional arguments to pass to the agent's invocation, including any <see cref="PromptExecutionSettings"/>.</param>
     /// <param name="kernel">The <see cref="Kernel"/> containing services, plugins, and other state for use by the agent.</param>
+    /// <param name="messages">Optional receiver of the completed messages generated</param>
     /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
     /// <returns>Asynchronous enumeration of messages.</returns>
     /// <remarks>
     /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
     /// </remarks>
     public IAsyncEnumerable<StreamingChatMessageContent> InvokeStreamingAsync(
         string threadId,
-        ChatHistory messages,
         OpenAIAssistantInvocationOptions? options,
         KernelArguments? arguments = null,
         Kernel? kernel = null,
+        ChatHistory? messages = null,
         CancellationToken cancellationToken = default)
     {
         this.ThrowIfDeleted();
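Note: with this overload change, a ChatHistory is no longer required just to satisfy the signature, and because messages moved behind arguments and kernel, existing callers must now pass it by name (as the sample diff above does). A usage sketch, assuming an existing OpenAIAssistantAgent agent and string threadId:

    // Stream without capturing the completed messages (messages defaults to null).
    await foreach (StreamingChatMessageContent update in agent.InvokeStreamingAsync(threadId))
    {
        Console.Write(update.Content);
    }

    // Capture completed messages by passing the receiver as a named argument.
    ChatHistory history = [];
    await foreach (StreamingChatMessageContent update in agent.InvokeStreamingAsync(threadId, messages: history))
    {
        Console.Write(update.Content);
    }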
@@ -13,13 +13,13 @@ public sealed class RedisHashSetVectorStoreRecordCollectionOptions<TRecord>
 {
     /// <summary>
     /// Gets or sets a value indicating whether the collection name should be prefixed to the
-    /// key names before reading or writing to the Redis store. Default is false.
+    /// key names before reading or writing to the Redis store. Default is true.
     /// </summary>
     /// <remarks>
     /// For a record to be indexed by a specific Redis index, the key name must be prefixed with the matching prefix configured on the Redis index.
     /// You can either pass in keys that are already prefixed, or set this option to true to have the collection name prefixed to the key names automatically.
     /// </remarks>
-    public bool PrefixCollectionNameToKeyNames { get; init; } = false;
+    public bool PrefixCollectionNameToKeyNames { get; init; } = true;

     /// <summary>
     /// Gets or sets an optional custom mapper to use when converting between the data model and the Redis record.
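Note: because the default flips from false to true, callers that already store fully prefixed keys must now opt out explicitly. A minimal sketch (MyRecord is a hypothetical data model):

    var options = new RedisHashSetVectorStoreRecordCollectionOptions<MyRecord>
    {
        // Keys are already prefixed to match the Redis index, so disable automatic prefixing.
        PrefixCollectionNameToKeyNames = false,
    };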
@@ -94,15 +94,23 @@ def __init__(
         if not azure_openai_settings.chat_deployment_name:
             raise ServiceInitializationError("chat_deployment_name is required.")

-        # If the api_key is none, and the ad_token is none, and the ad_token_provider is none,
+        # If the async_client is None, the api_key is None, the ad_token is None, and the ad_token_provider is None,
         # then we will attempt to get the ad_token using the default endpoint specified in the Azure OpenAI settings.
-        if api_key is None and ad_token_provider is None and azure_openai_settings.token_endpoint and ad_token is None:
+        if (
+            async_client is None
+            and azure_openai_settings.api_key is None
+            and ad_token_provider is None
+            and ad_token is None
+            and azure_openai_settings.token_endpoint
+        ):
             ad_token = azure_openai_settings.get_azure_openai_auth_token(
                 token_endpoint=azure_openai_settings.token_endpoint
             )

-        if not azure_openai_settings.api_key and not ad_token and not ad_token_provider:
-            raise ServiceInitializationError("Please provide either api_key, ad_token or ad_token_provider")
+        if not async_client and not azure_openai_settings.api_key and not ad_token and not ad_token_provider:
+            raise ServiceInitializationError(
+                "Please provide either a custom client, or an api_key, an ad_token or an ad_token_provider"
+            )

         super().__init__(
             deployment_name=azure_openai_settings.chat_deployment_name,
