Commit

Merge pull request #487 from betalgo/dev

v7.4.5

kayhantolga authored Feb 1, 2024
2 parents 67f39b3 + eb8e406 commit a56950d

Showing 10 changed files with 144 additions and 31 deletions.
3 changes: 2 additions & 1 deletion OpenAI.SDK/Interfaces/IChatCompletionService.cs
@@ -23,9 +23,10 @@ public interface IChatCompletionService
/// </summary>
/// <param name="modelId">The ID of the model to use for this request</param>
/// <param name="chatCompletionCreate"></param>
/// <param name="justDataMode">Ignore stream lines if they don’t start with "data:". If you don't know what it means, probably you shouldn't change this.</param>
/// <param name="cancellationToken">Propagates notification that operations should be canceled.</param>
/// <returns></returns>
IAsyncEnumerable<ChatCompletionCreateResponse> CreateCompletionAsStream(ChatCompletionCreateRequest chatCompletionCreate, string? modelId = null, CancellationToken cancellationToken = default);
IAsyncEnumerable<ChatCompletionCreateResponse> CreateCompletionAsStream(ChatCompletionCreateRequest chatCompletionCreate, string? modelId = null, bool justDataMode = true, CancellationToken cancellationToken = default);
}

public static class IChatCompletionServiceExtension
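For orientation, here is a minimal consumer-side sketch of the new parameter in use. It is not part of the commit: `sdk` is assumed to be an already-configured `IOpenAIService`, the prompt and model choice are illustrative, and a project with implicit usings enabled is assumed.

```csharp
using OpenAI.Interfaces;
using OpenAI.ObjectModels;
using OpenAI.ObjectModels.RequestModels;

// Sketch only: "sdk" is an already-configured IOpenAIService instance.
var request = new ChatCompletionCreateRequest
{
    Messages = new List<ChatMessage> { ChatMessage.FromUser("Write one sentence about the sea.") },
    Model = Models.Gpt_3_5_Turbo
};

// justDataMode defaults to true: stream lines that do not start with "data: "
// (for example ":ping" keep-alive comments) are skipped. Pass false to receive them as before.
await foreach (var chunk in sdk.ChatCompletion.CreateCompletionAsStream(request, justDataMode: true))
{
    if (chunk.Successful)
    {
        Console.Write(chunk.Choices.First().Message.Content);
    }
}
```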
67 changes: 54 additions & 13 deletions OpenAI.SDK/Managers/OpenAIChatCompletions.cs
@@ -2,6 +2,7 @@
using System.Text.Json;
using OpenAI.Extensions;
using OpenAI.Interfaces;
using OpenAI.ObjectModels;
using OpenAI.ObjectModels.RequestModels;
using OpenAI.ObjectModels.ResponseModels;

@@ -17,7 +18,7 @@ public async Task<ChatCompletionCreateResponse> CreateCompletion(ChatCompletionC
}

/// <inheritdoc />
public async IAsyncEnumerable<ChatCompletionCreateResponse> CreateCompletionAsStream(ChatCompletionCreateRequest chatCompletionCreateRequest, string? modelId = null,
public async IAsyncEnumerable<ChatCompletionCreateResponse> CreateCompletionAsStream(ChatCompletionCreateRequest chatCompletionCreateRequest, string? modelId = null, bool justDataMode = true,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Helper data in case we need to reassemble a multi-packet response
@@ -39,12 +40,18 @@ public async IAsyncEnumerable<ChatCompletionCreateResponse> CreateCompletionAsSt
cancellationToken.ThrowIfCancellationRequested();

var line = await reader.ReadLineAsync();

// Skip empty lines
if (string.IsNullOrEmpty(line))
{
continue;
}

if (justDataMode && !line.StartsWith("data: "))
{
continue;
}

line = line.RemoveIfStartWith("data: ");

// Exit the loop if the stream is done
@@ -81,22 +88,23 @@ public async IAsyncEnumerable<ChatCompletionCreateResponse> CreateCompletionAsSt
}

/// <summary>
/// This helper class attempts to reassemble a function call response
/// This helper class attempts to reassemble a tool call (type == "function") response
/// that was split up across several streamed chunks.
/// Note that this only works for the first message in each response,
/// and ignores the others; if OpenAI ever changes their response format
/// this will need to be adjusted.
/// </summary>
private class ReassemblyContext
{
private FunctionCall? FnCall;
private IList<ToolCall>? _deltaFnCallList;
private IList<ToolCall>? _toolCallList;

public bool IsFnAssemblyActive => FnCall != null;
public bool IsFnAssemblyActive => _deltaFnCallList != null;


/// <summary>
/// Detects if a response block is a part of a multi-chunk
/// streamed function call response. As long as that's true,
/// streamed tool call response of type == "function". As long as that's true,
/// it keeps accumulating block contents, and once function call
/// streaming is done, it produces the assembled results in the final block.
/// </summary>
@@ -117,36 +125,69 @@ public void Process(ChatCompletionCreateResponse block)
// We're going to steal the partial message and squirrel it away for the time being.
if (!IsFnAssemblyActive && isStreamingFnCall)
{
FnCall = firstChoice.Message.FunctionCall;
firstChoice.Message.FunctionCall = null;
_toolCallList = firstChoice.Message.ToolCalls;
_deltaFnCallList = new List<ToolCall>();
foreach (var t in _toolCallList!)
{
if (t.FunctionCall != null && t.Type == StaticValues.CompletionStatics.ToolType.Function)
_deltaFnCallList.Add(t);
}

justStarted = true;
}

// As long as we're assembling, keep on appending those args
// (Skip the first one, because it was already processed in the block above)
if (IsFnAssemblyActive && !justStarted)
{
FnCall.Arguments += ExtractArgsSoFar();
// Handles only tool calls with type == "function"
using var argumentsList = ExtractArgsSoFar().GetEnumerator();
var existItems = argumentsList.MoveNext();

if (existItems)
{
foreach (var f in _deltaFnCallList!)
{
f.FunctionCall!.Arguments += argumentsList.Current;
argumentsList.MoveNext();
}
}
}

// If we were assembling and it just finished, fill this block with the info we've assembled, and we're done.
if (IsFnAssemblyActive && !isStreamingFnCall)
{
firstChoice.Message ??= ChatMessage.FromAssistant(""); // just in case? not sure it's needed
firstChoice.Message.FunctionCall = FnCall;
FnCall = null;
firstChoice.Message.ToolCalls = _toolCallList;
_deltaFnCallList = null;
}

// Returns true if we're actively streaming, and also have a partial function call in the response
bool IsStreamingFunctionCall()
{
return firstChoice.FinishReason == null && // actively streaming, and
firstChoice.Message?.FunctionCall != null;
firstChoice.Message?.ToolCalls?.Count > 0 &&
(firstChoice.Message?.ToolCalls.Any(t => t.FunctionCall != null) ?? false);
} // have a function call

string ExtractArgsSoFar()
IEnumerable<string> ExtractArgsSoFar()
{
return block.Choices?.FirstOrDefault()?.Message?.FunctionCall?.Arguments ?? "";
var toolCalls = block.Choices?.FirstOrDefault()?.Message?.ToolCalls;

if (toolCalls != null)
{
var functionCallList = toolCalls
.Where(t => t.FunctionCall != null)
.Select(t => t.FunctionCall);

if (functionCallList != null)
{
foreach (var functionCall in functionCallList)
{
yield return functionCall!.Arguments ?? "";
}
}
}
}
}
}
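To show what the reassembly above buys the caller, here is a hedged consumer-side sketch. It assumes a configured `IOpenAIService` (`sdk`) and a `ChatCompletionCreateRequest` (`request`) whose `Tools` contain at least one function definition; the names are illustrative and implicit usings (including `System.Linq`) are assumed.

```csharp
// Sketch only: "sdk" and "request" are assumed to exist as described above.
await foreach (var chunk in sdk.ChatCompletion.CreateCompletionAsStream(request))
{
    var choice = chunk.Choices?.FirstOrDefault();

    // While FinishReason is null, ReassemblyContext is still accumulating the
    // streamed argument fragments for each tool call of type "function".
    if (choice?.FinishReason == null)
    {
        continue;
    }

    // On the final chunk the message carries the fully assembled tool calls.
    var toolCalls = choice.Message?.ToolCalls;
    if (toolCalls == null)
    {
        continue;
    }

    foreach (var toolCall in toolCalls.Where(t => t.FunctionCall != null))
    {
        Console.WriteLine($"{toolCall.FunctionCall!.Name}({toolCall.FunctionCall.Arguments})");
    }
}
```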
2 changes: 1 addition & 1 deletion OpenAI.SDK/ObjectModels/Models.cs
@@ -453,7 +453,7 @@ public static string EnumToString(this Subject subject, string baseModel)
Subject.Code => "code-{0}",
Subject.CodeEdit => "code-{0}-edit",
Subject.Edit => "text-{0}-edit",
Subject.TextEmbedding => "text-embedding-{0}",
Subject.TextEmbedding => "text-embedding",
Subject.TextModeration => "text-moderation-{0}",
_ => throw new ArgumentOutOfRangeException(nameof(subject), subject, null)
}, baseModel);
2 changes: 1 addition & 1 deletion OpenAI.SDK/OpenAI.csproj
@@ -11,7 +11,7 @@
<PackageIcon>OpenAI-Betalgo.png</PackageIcon>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
<Title>OpenAI SDK by Betalgo</Title>
<Version>7.4.4</Version>
<Version>7.4.5</Version>
<Authors>Tolga Kayhan, Betalgo</Authors>
<Company>Betalgo Up Ltd.</Company>
<Product>OpenAI ChatGPT, Whisper, GPT-4 and DALL·E dotnet SDK</Product>
28 changes: 27 additions & 1 deletion OpenAI.Utilities.Tests/FunctionCallingHelperTests.cs
@@ -63,7 +63,7 @@ public void VerifyGetFunctionDefinitions()
{
var functionDefinitions = FunctionCallingHelper.GetToolDefinitions<FunctionCallingTestClass>();

functionDefinitions.Count.ShouldBe(3);
functionDefinitions.Count.ShouldBe(4);

var functionDefinition = functionDefinitions.First(x => x.Function!.Name == "TestFunction");
functionDefinition.Function!.Description.ShouldBe("Test Function");
Expand All @@ -79,6 +79,26 @@ public void VerifyGetFunctionDefinitions()
functionDefinition3.Function!.Description.ShouldBe("Third Function");
functionDefinition3.Function!.Parameters.ShouldNotBeNull();
functionDefinition3.Function!.Parameters.Properties!.Count.ShouldBe(1);

var functionDefinition4 = functionDefinitions.First(x => x.Function!.Name == "fourth_function");
functionDefinition4.Function!.Description.ShouldBe("Fourth Function");
functionDefinition4.Function!.Name.ShouldBe("fourth_function");
functionDefinition4.Function!.Parameters.ShouldNotBeNull();
functionDefinition4.Function!.Parameters.Properties!.Count.ShouldBe(0);
}

[Fact]
public void VerifyCallFunction_CustomFunctionName()
{
var obj = new FunctionCallingTestClass();

var functionCall = new FunctionCall
{
Name = "fourth_function"
};

var result = FunctionCallingHelper.CallFunction<string>(functionCall, obj);
result.ShouldBe("Ciallo~(∠・ω< )⌒★");
}

[Fact]
@@ -268,6 +288,12 @@ public void ThirdFunction([ParameterDescription(Type = "string", Description = "
{
OverriddenTypeParameter = overriddenTypeParameter.ToString();
}

[FunctionDescription("Fourth Function", Name = "fourth_function")]
public string FourthFunction()
{
return "Ciallo~(∠・ω< )⌒★";
}
}

public enum TestEnum
42 changes: 37 additions & 5 deletions OpenAI.Utilities/FunctionCalling/FunctionCallingHelper.cs
@@ -155,8 +155,7 @@ public static List<ToolDefinition> GetToolDefinitions(Type type)
throw new ArgumentNullException(nameof(obj));
}

var methodInfo = obj.GetType().GetMethod(functionCall.Name);

var methodInfo = obj.GetMethod(functionCall);
if (methodInfo == null)
{
throw new InvalidFunctionCallException($"Method '{functionCall.Name}' on type '{obj.GetType()}' not found");
@@ -180,17 +179,50 @@ public static List<ToolDefinition> GetToolDefinitions(Type type)
var name = parameterDescriptionAttribute?.Name ?? parameter.Name!;
var argument = arguments.FirstOrDefault(x => x.Key == name);

object? value;
if (argument.Key == null)
{
throw new Exception($"Argument '{name}' not found");
if (parameter.IsOptional)
{
value = parameter.DefaultValue;
}
else
{
throw new Exception($"Argument '{name}' not found");
}
}
else
{
value = parameter.ParameterType.IsEnum ? Enum.Parse(parameter.ParameterType, argument.Value.ToString()!) : ((JsonElement)argument.Value).Deserialize(parameter.ParameterType);
}

var value = parameter.ParameterType.IsEnum ? Enum.Parse(parameter.ParameterType, argument.Value.ToString()!) : ((JsonElement) argument.Value).Deserialize(parameter.ParameterType);

args.Add(value);
}

var result = (T?) methodInfo.Invoke(obj, args.ToArray());
return result;
}

private static MethodInfo? GetMethod(this object obj, FunctionCall functionCall)
{
var type = obj.GetType();

// Attempt to find the method directly by name first
if (functionCall.Name != null)
{
var methodByName = type.GetMethod(functionCall.Name);
if (methodByName != null)
{
return methodByName;
}
}

// If not found, then look for methods with the custom attribute
var methodsWithAttributes = type
.GetMethods()
.FirstOrDefault(m => m.GetCustomAttributes(typeof(FunctionDescriptionAttribute), false).FirstOrDefault() is FunctionDescriptionAttribute attr && attr.Name == functionCall.Name);

return methodsWithAttributes;
}

}
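Taken together, the changes above let a `FunctionCall` resolve to a method through its attribute `Name` (not only the CLR method name) and fall back to C# default values for optional parameters that are missing from the JSON arguments. A hedged sketch follows; the class, method, and argument names are invented for illustration, and the namespaces are assumed to be `OpenAI.Utilities.FunctionCalling` and `OpenAI.ObjectModels.RequestModels`.

```csharp
using System;
using OpenAI.ObjectModels.RequestModels;
using OpenAI.Utilities.FunctionCalling;

public class MathTool
{
    // The wire name "add_numbers" differs from the CLR method name "Add";
    // the new GetMethod lookup resolves it via the FunctionDescription attribute.
    [FunctionDescription("Adds two numbers", Name = "add_numbers")]
    public int Add(
        [ParameterDescription(Description = "First addend")] int a,
        [ParameterDescription(Description = "Second addend")] int b = 10)
    {
        return a + b;
    }
}

public static class Demo
{
    public static void Run()
    {
        // "b" is omitted from the arguments, so its C# default (10) is used.
        var call = new FunctionCall { Name = "add_numbers", Arguments = "{\"a\": 5}" };
        var sum = FunctionCallingHelper.CallFunction<int>(call, new MathTool());
        Console.WriteLine(sum); // 15
    }
}
```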
6 changes: 3 additions & 3 deletions OpenAI.Utilities/OpenAI.Utilities.csproj
@@ -41,9 +41,9 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="Betalgo.OpenAI" Version="7.4.3" />
<PackageReference Include="CsvHelper" Version="30.0.1" />
<PackageReference Include="Betalgo.OpenAI" Version="7.4.4" />
<PackageReference Include="CsvHelper" Version="30.1.0" />
<PackageReference Include="MathNet.Numerics" Version="5.0.0" />
<PackageReference Include="Microsoft.Data.Analysis" Version="0.21.0" />
<PackageReference Include="Microsoft.Data.Analysis" Version="0.21.1" />
</ItemGroup>
</Project>
@@ -6,6 +6,7 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UserSecretsId>acb2421b-1517-4212-93a4-e4eb182b4626</UserSecretsId>
<LangVersion>latest</LangVersion>
</PropertyGroup>

<ItemGroup>
@@ -36,9 +36,9 @@ public static async Task ExerciseFunctionCalling(IOpenAIService openAIService)

var response = reply.Choices.First().Message;

if (response.FunctionCall != null)
if (response.ToolCalls != null)
{
Console.WriteLine($"Invoking {response.FunctionCall.Name} with params: {response.FunctionCall.Arguments}");
Console.WriteLine($"Invoking {response.ToolCalls.First().FunctionCall.Name} with params: {response.ToolCalls.First().FunctionCall.Arguments}");
}
else
{
Expand All @@ -47,10 +47,10 @@ public static async Task ExerciseFunctionCalling(IOpenAIService openAIService)

req.Messages.Add(response);

if (response.FunctionCall != null)
if (response.ToolCalls != null)
{
var functionCall = response.FunctionCall;
var result = FunctionCallingHelper.CallFunction<float>(functionCall, calculator);
var functionCall = response.ToolCalls.First().FunctionCall;
var result = FunctionCallingHelper.CallFunction<float>(functionCall!, calculator);
response.Content = result.ToString(CultureInfo.CurrentCulture);
}
} while (req.Messages.Last().FunctionCall != null);
14 changes: 13 additions & 1 deletion Readme.md
@@ -141,6 +141,14 @@ var fn1 = new FunctionDefinitionBuilder("get_current_weather", "Get the current
.AddParameter("values", PropertyDefinition.DefineArray(PropertyDefinition.DefineNumber("Sequence of numbers specified by the user")))
.Build();

var tools = new List<ToolDefinition>()
{
new ToolDefinition() { Function = fn1 },
new ToolDefinition() { Function = fn2 },
new ToolDefinition() { Function = fn3 },
new ToolDefinition() { Function = fn4 },
};

ConsoleExtensions.WriteLine("Chat Function Call Test:", ConsoleColor.DarkCyan);
var completionResult = await sdk.ChatCompletion.CreateCompletion(new ChatCompletionCreateRequest
{
Expand All @@ -149,7 +157,7 @@ var fn1 = new FunctionDefinitionBuilder("get_current_weather", "Get the current
ChatMessage.FromSystem("Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous."),
ChatMessage.FromUser("Give me a weather report for Chicago, USA, for the next 5 days.")
},
Functions = new List<FunctionDefinition> { fn1, fn2, fn3, fn4 },
Tools = tools,
MaxTokens = 50,
Model = Models.Gpt_3_5_Turbo
});
@@ -294,6 +302,10 @@ I will always be using the latest libraries, and future releases will frequently
I am incredibly busy. If I forgot your name, please accept my apologies and let me know so I can add it to the list.

## Changelog
### 7.4.5
- Fixed function calling streaming bugs, thanks to @David-Buyer, @dogdie233, @gavi and @Maracaipe611
- Breaking Change:
While streaming (`CreateCompletionAsStream`), some unexpected incoming data chunks, such as `:ping` or `:event` lines, could appear; @gavi discovered this issue. These chunks are now ignored by default. If you were relying on them, set `justDataMode` to `false`.
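For example, restoring the pre-7.4.5 behavior is a one-argument change (sketch; `sdk` and `request` stand for an existing service and request, as in the earlier examples):

```csharp
// Opt out of the new filtering and receive non-"data: " stream lines again.
var stream = sdk.ChatCompletion.CreateCompletionAsStream(request, justDataMode: false);
```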
### 7.4.4
- Added support for new models : `TextEmbeddingV3Small`, `TextEmbeddingV3Large`, `Gpt_3_5_Turbo_0125`, `Gpt_4_0125_preview`, `Gpt_4_turbo_preview`, `Text_moderation_007`, `Text_moderation_latest`, `Text_moderation_stable`
- Added optional dimension and encoding for embedding, thanks to @shanepowell
