diff --git a/OpenAI.Playground/Program.cs b/OpenAI.Playground/Program.cs
index 88ac4824..dac9d7e5 100644
--- a/OpenAI.Playground/Program.cs
+++ b/OpenAI.Playground/Program.cs
@@ -42,11 +42,17 @@
 // | / \ / \ | \ /) | ( \ /o\ / ) | (\ / | / \ / \ |
 // |-----------------------------------------------------------------------|

-await ChatCompletionTestHelper.RunSimpleChatCompletionTest(sdk);
+// Vision
+//await VisionTestHelper.RunSimpleVisionTest(sdk);
+//await VisionTestHelper.RunSimpleVisionStreamTest(sdk);
+//await VisionTestHelper.RunSimpleVisionTestUsingBase64EncodedImage(sdk);
+
 //await ChatCompletionTestHelper.RunSimpleCompletionStreamTest(sdk);
 //await ChatCompletionTestHelper.RunChatFunctionCallTest(sdk);
+//await ChatCompletionTestHelper.RunChatFunctionCallTestAsStream(sdk);
 //await FineTuningJobTestHelper.RunCaseStudyIsTheModelMakingUntrueStatements(sdk);
+
 // Whisper
 //await AudioTestHelper.RunSimpleAudioCreateTranscriptionTest(sdk);
 //await AudioTestHelper.RunSimpleAudioCreateTranslationTest(sdk);
diff --git a/OpenAI.Playground/TestHelpers/ChatCompletionTestHelper.cs b/OpenAI.Playground/TestHelpers/ChatCompletionTestHelper.cs
index 085d3a65..5f22657a 100644
--- a/OpenAI.Playground/TestHelpers/ChatCompletionTestHelper.cs
+++ b/OpenAI.Playground/TestHelpers/ChatCompletionTestHelper.cs
@@ -97,7 +97,7 @@ public static async Task RunSimpleCompletionStreamTest(IOpenAIService sdk)

     public static async Task RunChatFunctionCallTest(IOpenAIService sdk)
     {
-        ConsoleExtensions.WriteLine("Chat Function Call Testing is starting:", ConsoleColor.Cyan);
+        ConsoleExtensions.WriteLine("Chat Tool Function Call Testing is starting:", ConsoleColor.Cyan);

         // example taken from:
         // https://github.com/openai/openai-cookbook/blob/main/examples/How_to_call_functions_with_chat_models.ipynb

@@ -130,9 +130,11 @@ public static async Task RunChatFunctionCallTest(IOpenAIService sdk)
                 ChatMessage.FromSystem("Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous."),
                 ChatMessage.FromUser("Give me a weather report for Chicago, USA, for the next 5 days.")
             },
-            Functions = new List<FunctionDefinition> { fn1, fn2, fn3, fn4 },
+            Tools = new List<ToolDefinition> { ToolDefinition.DefineFunction(fn1), ToolDefinition.DefineFunction(fn2), ToolDefinition.DefineFunction(fn3), ToolDefinition.DefineFunction(fn4) },
             // optionally, to force a specific function:
-            // FunctionCall = new Dictionary<string, string> { { "name", "get_current_weather" } },
+            //ToolChoice = ToolChoice.FunctionChoice("get_current_weather"),
+            // or auto tool choice:
+            //ToolChoice = ToolChoice.Auto,
             MaxTokens = 50,
             Model = Models.Gpt_3_5_Turbo
         });
@@ -152,13 +154,23 @@ public static async Task RunChatFunctionCallTest(IOpenAIService sdk)
             var choice = completionResult.Choices.First();
             Console.WriteLine($"Message: {choice.Message.Content}");

-            var fn = choice.Message.FunctionCall;
-            if (fn != null)
+            var tools = choice.Message.ToolCalls;
+            if (tools != null)
             {
-                Console.WriteLine($"Function call: {fn.Name}");
-                foreach (var entry in fn.ParseArguments())
+                Console.WriteLine($"Tools: {tools.Count}");
+                foreach (var toolCall in tools)
                 {
-                    Console.WriteLine($"  {entry.Key}: {entry.Value}");
+                    Console.WriteLine($"  {toolCall.Id}: {toolCall.FunctionCall}");
+
+                    var fn = toolCall.FunctionCall;
+                    if (fn != null)
+                    {
+                        Console.WriteLine($"    Function call: {fn.Name}");
+                        foreach (var entry in fn.ParseArguments())
+                        {
+                            Console.WriteLine($"      {entry.Key}: {entry.Value}");
+                        }
+                    }
                 }
             }
         }
@@ -181,7 +193,7 @@ public static async Task RunChatFunctionCallTest(IOpenAIService sdk)

     public static async Task RunChatFunctionCallTestAsStream(IOpenAIService sdk)
     {
-        ConsoleExtensions.WriteLine("Chat Function Call Testing is starting:", ConsoleColor.Cyan);
+        ConsoleExtensions.WriteLine("Chat Tool Function Call Stream Testing is starting:", ConsoleColor.Cyan);

         // example taken from:
         // https://github.com/openai/openai-cookbook/blob/main/examples/How_to_call_functions_with_chat_models.ipynb

@@ -221,11 +233,13 @@ public static async Task RunChatFunctionCallTestAsStream(IOpenAIService sdk)
                 // or to test array functions, use this instead:
                 // ChatMessage.FromUser("The combination is: One. Two. Three. Four.
Five."), }, - Functions = new List {fn1, fn2, fn3, fn4}, + Tools = new List { ToolDefinition.DefineFunction(fn1), ToolDefinition.DefineFunction(fn2), ToolDefinition.DefineFunction(fn3), ToolDefinition.DefineFunction(fn4) }, // optionally, to force a specific function: - // FunctionCall = new Dictionary { { "name", "get_current_weather" } }, + ToolChoice = ToolChoice.FunctionChoice("get_current_weather"), + // or auto tool choice: + // ToolChoice = ToolChoice.Auto, MaxTokens = 50, - Model = Models.Gpt_3_5_Turbo_0613 + Model = Models.Gpt_4_1106_preview }); /* when testing weather forecasts, expected output should be along the lines of: @@ -243,7 +257,7 @@ public static async Task RunChatFunctionCallTestAsStream(IOpenAIService sdk) Function call: identify_number_sequence values: [1, 2, 3, 4, 5] */ - + var functionArguments = new Dictionary(); await foreach (var completionResult in completionResults) { if (completionResult.Successful) @@ -251,13 +265,49 @@ public static async Task RunChatFunctionCallTestAsStream(IOpenAIService sdk) var choice = completionResult.Choices.First(); Console.WriteLine($"Message: {choice.Message.Content}"); - var fn = choice.Message.FunctionCall; - if (fn != null) + var tools = choice.Message.ToolCalls; + if (tools != null) { - Console.WriteLine($"Function call: {fn.Name}"); - foreach (var entry in fn.ParseArguments()) + Console.WriteLine($"Tools: {tools.Count}"); + for (int i = 0; i < tools.Count; i++) { - Console.WriteLine($" {entry.Key}: {entry.Value}"); + var toolCall = tools[i]; + Console.WriteLine($" {toolCall.Id}: {toolCall.FunctionCall}"); + + var fn = toolCall.FunctionCall; + if (fn != null) + { + if (!string.IsNullOrEmpty(fn.Name)) + { + Console.WriteLine($" Function call: {fn.Name}"); + } + + if (!string.IsNullOrEmpty(fn.Arguments)) + { + if (functionArguments.TryGetValue(i, out var currentArguments)) + { + currentArguments += fn.Arguments; + } + else + { + currentArguments = fn.Arguments; + } + functionArguments[i] = currentArguments; + fn.Arguments = currentArguments; + + try + { + foreach (var entry in fn.ParseArguments()) + { + Console.WriteLine($" {entry.Key}: {entry.Value}"); + } + } + catch (Exception) + { + // ignore + } + } + } } } } diff --git a/OpenAI.Playground/TestHelpers/VisionTestHelper.cs b/OpenAI.Playground/TestHelpers/VisionTestHelper.cs new file mode 100644 index 00000000..fc3e09db --- /dev/null +++ b/OpenAI.Playground/TestHelpers/VisionTestHelper.cs @@ -0,0 +1,185 @@ +using OpenAI.Interfaces; +using OpenAI.ObjectModels; +using OpenAI.ObjectModels.RequestModels; +using static OpenAI.ObjectModels.StaticValues; + +namespace OpenAI.Playground.TestHelpers; + +internal static class VisionTestHelper +{ + public static async Task RunSimpleVisionTest(IOpenAIService sdk) + { + ConsoleExtensions.WriteLine("VIsion Testing is starting:", ConsoleColor.Cyan); + + try + { + ConsoleExtensions.WriteLine("Vision Test:", ConsoleColor.DarkCyan); + + var completionResult = await sdk.ChatCompletion.CreateCompletion( + new ChatCompletionCreateRequest + { + Messages = new List + { + ChatMessage.FromSystem("You are an image analyzer assistant."), + ChatMessage.FromUser( + new List + { + MessageContent.TextContent("What is on the picture in details?"), + MessageContent.ImageUrlContent( + "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg", + ImageStatics.ImageDetailTypes.High + ) + } + ), + }, + MaxTokens = 300, + Model = Models.Gpt_4_vision_preview, + N = 
1 + } + ); + + if (completionResult.Successful) + { + Console.WriteLine(completionResult.Choices.First().Message.Content); + } + else + { + if (completionResult.Error == null) + { + throw new Exception("Unknown Error"); + } + + Console.WriteLine( + $"{completionResult.Error.Code}: {completionResult.Error.Message}" + ); + } + } + catch (Exception e) + { + Console.WriteLine(e); + throw; + } + } + + public static async Task RunSimpleVisionStreamTest(IOpenAIService sdk) + { + ConsoleExtensions.WriteLine("Vision Stream Testing is starting:", ConsoleColor.Cyan); + try + { + ConsoleExtensions.WriteLine("Vision Stream Test:", ConsoleColor.DarkCyan); + + var completionResult = sdk.ChatCompletion.CreateCompletionAsStream( + new ChatCompletionCreateRequest + { + Messages = new List + { + ChatMessage.FromSystem("You are an image analyzer assistant."), + ChatMessage.FromUser( + new List + { + MessageContent.TextContent("Whats in this image?"), + MessageContent.ImageUrlContent( + "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg", + ImageStatics.ImageDetailTypes.Low + ) + } + ), + }, + MaxTokens = 300, + Model = Models.Gpt_4_vision_preview, + N = 1 + } + ); + + await foreach (var completion in completionResult) + { + if (completion.Successful) + { + Console.Write(completion.Choices.First().Message.Content); + } + else + { + if (completion.Error == null) + { + throw new Exception("Unknown Error"); + } + + Console.WriteLine( + $"{completion.Error.Code}: {completion.Error.Message}" + ); + } + } + + Console.WriteLine(""); + Console.WriteLine("Complete"); + } + catch (Exception e) + { + Console.WriteLine(e); + throw; + } + } + + public static async Task RunSimpleVisionTestUsingBase64EncodedImage(IOpenAIService sdk) + { + ConsoleExtensions.WriteLine("Vision Testing is starting:", ConsoleColor.Cyan); + + try + { + ConsoleExtensions.WriteLine( + "Vision with base64 encoded image Test:", + ConsoleColor.DarkCyan + ); + + const string originalFileName = "image_edit_original.png"; + var originalFile = await FileExtensions.ReadAllBytesAsync( + $"SampleData/{originalFileName}" + ); + + var completionResult = await sdk.ChatCompletion.CreateCompletion( + new ChatCompletionCreateRequest + { + Messages = new List + { + ChatMessage.FromSystem("You are an image analyzer assistant."), + ChatMessage.FromUser( + new List + { + MessageContent.TextContent("What is on the picture in details?"), + MessageContent.ImageBinaryContent( + originalFile, + ImageStatics.ImageFileTypes.Png, + ImageStatics.ImageDetailTypes.High + ) + } + ), + }, + MaxTokens = 300, + Model = Models.Gpt_4_vision_preview, + N = 1 + } + ); + + if (completionResult.Successful) + { + Console.WriteLine(completionResult.Choices.First().Message.Content); + } + else + { + if (completionResult.Error == null) + { + throw new Exception("Unknown Error"); + } + + Console.WriteLine( + $"{completionResult.Error.Code}: {completionResult.Error.Message}" + ); + } + } + catch (Exception e) + { + Console.WriteLine(e); + throw; + } + } +} diff --git a/OpenAI.SDK/ObjectModels/RequestModels/ChatCompletionCreateRequest.cs b/OpenAI.SDK/ObjectModels/RequestModels/ChatCompletionCreateRequest.cs index ba1424d1..a11308d0 100644 --- a/OpenAI.SDK/ObjectModels/RequestModels/ChatCompletionCreateRequest.cs +++ b/OpenAI.SDK/ObjectModels/RequestModels/ChatCompletionCreateRequest.cs @@ -22,28 +22,6 @@ public enum ResponseFormats [JsonPropertyName("messages")] public IList Messages { 
get; set; } - /// - /// A list of functions the model may generate JSON inputs for. - /// - [JsonIgnore] - public IList? Functions { get; set; } - - [JsonIgnore] public object? FunctionsAsObject { get; set; } - - [JsonPropertyName("functions")] - public object? FunctionCalculated - { - get - { - if (FunctionsAsObject != null && Functions != null) - { - throw new ValidationException("FunctionAsObject and Functions can not be assigned at the same time. One of them is should be null."); - } - - return Functions ?? FunctionsAsObject; - } - } - /// /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the /// tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are @@ -138,19 +116,60 @@ public IList? StopCalculated [JsonPropertyName("logit_bias")] public object? LogitBias { get; set; } + /// + /// A list of functions the model may generate JSON inputs for. + /// + [JsonIgnore] + public IList? Tools { get; set; } + + + [JsonIgnore] public object? ToolsAsObject { get; set; } /// - /// String or object. Controls how the model responds to function calls. - /// "none" means the model does not call a function, and responds to the end-user. - /// "auto" means the model can pick between an end-user or calling a function. - /// "none" is the default when no functions are present. "auto" is the default if functions are present. - /// Specifying a particular function via {"name": "my_function"} forces the model to call that function. - /// (Note: in C# specify that as: - /// FunctionCall = new Dictionary<string, string> { { "name", "my_function" } } - /// ). + /// A list of tools the model may call. Currently, only functions are supported as a tool. Use this to provide a list + /// of functions the model may generate JSON inputs for. /// - [JsonPropertyName("function_call")] - public object? FunctionCall { get; set; } + [JsonPropertyName("tools")] public object? ToolsCalculated + { + get + { + if (ToolsAsObject != null && Tools != null) + { + throw new ValidationException("ToolsAsObject and Tools can not be assigned at the same time. One of them is should be null."); + } + + return Tools ?? ToolsAsObject; + } + } + + /// + /// Controls which (if any) function is called by the model. none means the model will not call a function and instead + /// generates a message. auto means the model can pick between generating a message or calling a function. Specifying + /// a particular function via {"type: "function", "function": {"name": "my_function"}} forces the model to call that + /// function. + /// none is the default when no functions are present. auto is the default if functions are present. + /// + [JsonIgnore] + public ToolChoice? ToolChoice { get; set; } + + [JsonPropertyName("tool_choice")] + public object? ToolChoiceCalculated + { + get + { + if (ToolChoice != null && ToolChoice.Type != StaticValues.CompletionStatics.ToolChoiceType.Function && ToolChoice.Function != null) + { + throw new ValidationException("You cannot choose another type besides \"function\" while ToolChoice.Function is not null."); + } + + if (ToolChoice?.Type == StaticValues.CompletionStatics.ToolChoiceType.Function) + { + return ToolChoice; + } + + return ToolChoice?.Type; + } + } /// /// The format that the model must output. Used to enable JSON mode. 
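Taken together, the request-model changes above replace the removed `Functions`/`FunctionCall` properties with `Tools`/`ToolChoice`. A minimal request-construction sketch against the new surface, assuming the types introduced in this patch (the weather function name mirrors the Playground sample; the prompt string is illustrative):

```csharp
using OpenAI.ObjectModels;
using OpenAI.ObjectModels.RequestModels;

// Wrap an existing FunctionDefinition in a ToolDefinition.
// Parameters is omitted for brevity; real calls would supply a JSON Schema
// via PropertyDefinition.
var weatherFn = new FunctionDefinition
{
    Name = "get_current_weather",
    Description = "Get the current weather in a given location"
};

var request = new ChatCompletionCreateRequest
{
    Messages = new List<ChatMessage>
    {
        ChatMessage.FromUser("What is the weather like in Chicago?")
    },
    // old: Functions = new List<FunctionDefinition> { weatherFn },
    Tools = new List<ToolDefinition> { ToolDefinition.DefineFunction(weatherFn) },
    // old: FunctionCall = new Dictionary<string, string> { { "name", "get_current_weather" } },
    ToolChoice = ToolChoice.FunctionChoice("get_current_weather"),
    Model = Models.Gpt_3_5_Turbo
};
```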
diff --git a/OpenAI.SDK/ObjectModels/RequestModels/ChatMessage.cs b/OpenAI.SDK/ObjectModels/RequestModels/ChatMessage.cs index 037f3ac1..3e3e1a71 100644 --- a/OpenAI.SDK/ObjectModels/RequestModels/ChatMessage.cs +++ b/OpenAI.SDK/ObjectModels/RequestModels/ChatMessage.cs @@ -1,4 +1,5 @@ -using System.Text.Json.Serialization; +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; namespace OpenAI.ObjectModels.RequestModels; @@ -9,6 +10,10 @@ namespace OpenAI.ObjectModels.RequestModels; /// public class ChatMessage { + public ChatMessage() + { + } + /// /// /// The role of the author of this message. One of system, user, or assistant. @@ -17,13 +22,34 @@ public class ChatMessage /// The name of the author of this message. May contain a-z, A-Z, 0-9, and underscores, with a maximum /// length of 64 characters. /// - /// The name and arguments of a function that should be called, as generated by the model. - public ChatMessage(string role, string content, string? name = null, FunctionCall? functionCall = null) + /// The tool function call id generated by the model + /// The tool calls generated by the model. + public ChatMessage(string role, string content, string? name = null, IList? toolCalls = null, string? toolCallId = null) { Role = role; Content = content; Name = name; - FunctionCall = functionCall; + ToolCalls = toolCalls; + ToolCallId = toolCallId; + } + + /// + /// + /// The role of the author of this message. One of system, user, or assistant. + /// The list of the content messages. + /// + /// The name of the author of this message. May contain a-z, A-Z, 0-9, and underscores, with a maximum + /// length of 64 characters. + /// + /// The tool function call id generated by the model + /// The tool calls generated by the model. + public ChatMessage(string role, IList contents, string? name = null, IList? toolCalls = null, string? toolCallId = null) + { + Role = role; + Contents = contents; + Name = name; + ToolCalls = toolCalls; + ToolCallId = toolCallId; } /// @@ -32,11 +58,34 @@ public ChatMessage(string role, string content, string? name = null, FunctionCal [JsonPropertyName("role")] public string Role { get; set; } + [JsonIgnore] public string? Content { get; set; } + + [JsonIgnore] public IList? Contents { get; set; } + /// /// The contents of the message. /// [JsonPropertyName("content")] - public string Content { get; set; } + public object ContentCalculated + { + get + { + if (Content is not null && Contents is not null) + { + throw new ValidationException( + "Content and Contents can not be assigned at the same time. One of them must be null." + ); + } + + if (Content is not null) + { + return Content; + } + + return Contents!; + } + set => Content = value?.ToString(); + } /// /// The name of the author of this message. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 @@ -46,28 +95,47 @@ public ChatMessage(string role, string content, string? name = null, FunctionCal public string? Name { get; set; } /// - /// The name and arguments of a function that should be called, as generated by the model. + /// Required for tool role messages. + /// Tool call that this message is responding to. + /// + [JsonPropertyName("tool_call_id")] + public string? ToolCallId { get; set; } + + /// + /// Deprecated and replaced by tool_calls. The name and arguments of a function that should be called, as generated by + /// the model. /// [JsonPropertyName("function_call")] public FunctionCall? 
FunctionCall { get; set; } - public static ChatMessage FromAssistant(string content, string? name = null, FunctionCall? functionCall = null) + /// + /// The tool calls generated by the model, such as function calls. + /// + [JsonPropertyName("tool_calls")] + public IList? ToolCalls { get; set; } + + public static ChatMessage FromAssistant(string content, string? name = null, IList? toolCalls = null) { - return new ChatMessage(StaticValues.ChatMessageRoles.Assistant, content, name, functionCall); + return new(StaticValues.ChatMessageRoles.Assistant, content, name, toolCalls); } - public static ChatMessage FromFunction(string content, string? name = null) + public static ChatMessage FromTool(string content, string toolCallId) { - return new ChatMessage(StaticValues.ChatMessageRoles.Function, content, name); + return new(StaticValues.ChatMessageRoles.Tool, content, toolCallId: toolCallId); } public static ChatMessage FromUser(string content, string? name = null) { - return new ChatMessage(StaticValues.ChatMessageRoles.User, content, name); + return new(StaticValues.ChatMessageRoles.User, content, name); } public static ChatMessage FromSystem(string content, string? name = null) { - return new ChatMessage(StaticValues.ChatMessageRoles.System, content, name); + return new(StaticValues.ChatMessageRoles.System, content, name); + } + + public static ChatMessage FromUser(IList contents) + { + return new(StaticValues.ChatMessageRoles.User, contents); } } \ No newline at end of file diff --git a/OpenAI.SDK/ObjectModels/RequestModels/FunctionDefinition.cs b/OpenAI.SDK/ObjectModels/RequestModels/FunctionDefinition.cs index 712345c6..e02b54da 100644 --- a/OpenAI.SDK/ObjectModels/RequestModels/FunctionDefinition.cs +++ b/OpenAI.SDK/ObjectModels/RequestModels/FunctionDefinition.cs @@ -9,24 +9,24 @@ namespace OpenAI.ObjectModels.RequestModels; public class FunctionDefinition { /// - /// Required. The name of the function to be called. Must be a-z, A-Z, 0-9, + /// The name of the function to be called. Must be a-z, A-Z, 0-9, /// or contain underscores and dashes, with a maximum length of 64. /// [JsonPropertyName("name")] public string Name { get; set; } /// - /// Optional. The description of what the function does. + /// A description of what the function does, used by the model to choose when and how to call the function. /// [JsonPropertyName("description")] public string? Description { get; set; } /// /// Optional. The parameters the functions accepts, described as a JSON Schema object. - /// See the guide (https://platform.openai.com/docs/guides/gpt/function-calling) for examples, - /// and the JSON Schema reference (https://json-schema.org/understanding-json-schema/) - /// for documentation about the format. + /// See the guide for examples, + /// and the JSON Schema reference for + /// documentation about the format. /// [JsonPropertyName("parameters")] - public PropertyDefinition? Parameters { get; set; } + public PropertyDefinition Parameters { get; set; } } \ No newline at end of file diff --git a/OpenAI.SDK/ObjectModels/RequestModels/MessageContent.cs b/OpenAI.SDK/ObjectModels/RequestModels/MessageContent.cs new file mode 100644 index 00000000..02f0eafa --- /dev/null +++ b/OpenAI.SDK/ObjectModels/RequestModels/MessageContent.cs @@ -0,0 +1,81 @@ +using System.Text.Json.Serialization; + +namespace OpenAI.ObjectModels.RequestModels; + +/// +/// The content of a message. 
+/// </summary>
+public class MessageContent
+{
+    /// <summary>
+    ///     The value of Type property must be one of "text", "image_url"
+    ///     note: Currently openAI doesn't support images in the first system message.
+    /// </summary>
+    [JsonPropertyName("type")]
+    public string Type { get; set; }
+
+    /// <summary>
+    ///     If the value of Type property is "text", then the Text property must contain the message content text
+    /// </summary>
+    [JsonPropertyName("text")]
+    public string? Text { get; set; }
+
+    /// <summary>
+    ///     If the value of Type property is "image_url", then the ImageUrl property must contain a valid image url object
+    /// </summary>
+    [JsonPropertyName("image_url")]
+    public VisionImageUrl? ImageUrl { get; set; }
+
+    /// <summary>
+    ///     Static helper method to create MessageContent Text
+    ///     <param name="text">The text content</param>
+    /// </summary>
+    public static MessageContent TextContent(string text)
+    {
+        return new() { Type = "text", Text = text };
+    }
+
+    /// <summary>
+    ///     Static helper method to create MessageContent with Url
+    ///     OpenAI currently supports PNG, JPEG, WEBP, and non-animated GIF
+    ///     <param name="imageUrl">The url of an image</param>
+    ///     <param name="detail">The detail property</param>
+    /// </summary>
+    public static MessageContent ImageUrlContent(string imageUrl, string? detail = null)
+    {
+        return new()
+        {
+            Type = "image_url",
+            ImageUrl = new() { Url = imageUrl, Detail = detail }
+        };
+    }
+
+    /// <summary>
+    ///     Static helper method to create MessageContent from a binary image
+    ///     OpenAI currently supports PNG, JPEG, WEBP, and non-animated GIF
+    ///     <param name="binaryImage">The image binary data as byte array</param>
+    ///     <param name="imageType">The type of image</param>
+    ///     <param name="detail">The detail property</param>
+    /// </summary>
+    public static MessageContent ImageBinaryContent(
+        byte[] binaryImage,
+        string imageType,
+        string? detail = "auto"
+    )
+    {
+        return new()
+        {
+            Type = "image_url",
+            ImageUrl = new()
+            {
+                Url = string.Format(
+                    "data:image/{0};base64,{1}",
+                    imageType,
+                    Convert.ToBase64String(binaryImage)
+                ),
+                Detail = detail
+            }
+        };
+    }
+}
diff --git a/OpenAI.SDK/ObjectModels/RequestModels/ToolCall.cs b/OpenAI.SDK/ObjectModels/RequestModels/ToolCall.cs
new file mode 100644
index 00000000..96e9d3fb
--- /dev/null
+++ b/OpenAI.SDK/ObjectModels/RequestModels/ToolCall.cs
@@ -0,0 +1,24 @@
+using System.Text.Json.Serialization;
+
+namespace OpenAI.ObjectModels.RequestModels;
+
+public class ToolCall
+{
+    /// <summary>
+    ///     The ID of the tool call.
+    /// </summary>
+    [JsonPropertyName("id")]
+    public string Id { get; set; }
+
+    /// <summary>
+    ///     The type of the tool. Currently, only function is supported.
+    /// </summary>
+    [JsonPropertyName("type")]
+    public string Type { get; set; }
+
+    /// <summary>
+    ///     The function that the model called.
+    /// </summary>
+    [JsonPropertyName("function")]
+    public FunctionCall? FunctionCall { get; set; }
+}
\ No newline at end of file
diff --git a/OpenAI.SDK/ObjectModels/RequestModels/ToolChoiceFunction.cs b/OpenAI.SDK/ObjectModels/RequestModels/ToolChoiceFunction.cs
new file mode 100644
index 00000000..a262248b
--- /dev/null
+++ b/OpenAI.SDK/ObjectModels/RequestModels/ToolChoiceFunction.cs
@@ -0,0 +1,34 @@
+using System.Text.Json.Serialization;
+
+namespace OpenAI.ObjectModels.RequestModels;
+
+public class ToolChoice
+{
+    public static ToolChoice None => new() { Type = StaticValues.CompletionStatics.ToolChoiceType.None };
+    public static ToolChoice Auto => new() { Type = StaticValues.CompletionStatics.ToolChoiceType.Auto };
+    public static ToolChoice FunctionChoice(string functionName) => new()
+    {
+        Type = StaticValues.CompletionStatics.ToolChoiceType.Function,
+        Function = new FunctionTool()
+        {
+            Name = functionName
+        }
+    };
+
+    /// <summary>
+    ///     "none" is the default when no functions are present. <br />
+ /// "auto" is the default if functions are present.
+ /// "function" has to be assigned if user Function is not null
+ ///
+ /// Check for possible values. + ///
+ [JsonPropertyName("type")] + public string Type { get; set; } + + [JsonPropertyName("function")] public FunctionTool? Function { get; set; } + + public class FunctionTool + { + [JsonPropertyName("name")] public string Name { get; set; } + } +} \ No newline at end of file diff --git a/OpenAI.SDK/ObjectModels/RequestModels/ToolDefinition.cs b/OpenAI.SDK/ObjectModels/RequestModels/ToolDefinition.cs new file mode 100644 index 00000000..5a1cf9b8 --- /dev/null +++ b/OpenAI.SDK/ObjectModels/RequestModels/ToolDefinition.cs @@ -0,0 +1,49 @@ +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; + +namespace OpenAI.ObjectModels.RequestModels; + +/// +/// Definition of a valid tool. +/// +public class ToolDefinition +{ + /// + /// Required. The type of the tool. Currently, only function is supported. + /// + [JsonPropertyName("type")] + public string Type { get; set; } + + + /// + /// A list of functions the model may generate JSON inputs for. + /// + [JsonIgnore] + public FunctionDefinition? Function { get; set; } + + [JsonIgnore] + public object? FunctionsAsObject { get; set; } + + /// + /// Required. The description of what the function does. + /// + [JsonPropertyName("function")] + public object? FunctionCalculated + { + get + { + if (FunctionsAsObject != null && Function != null) + { + throw new ValidationException("FunctionAsObject and Function can not be assigned at the same time. One of them is should be null."); + } + + return Function ?? FunctionsAsObject; + } + } + + public static ToolDefinition DefineFunction(FunctionDefinition function) => new() + { + Type = StaticValues.CompletionStatics.ToolType.Function, + Function = function + }; +} \ No newline at end of file diff --git a/OpenAI.SDK/ObjectModels/RequestModels/VisionImageUrl.cs b/OpenAI.SDK/ObjectModels/RequestModels/VisionImageUrl.cs new file mode 100644 index 00000000..678b76d1 --- /dev/null +++ b/OpenAI.SDK/ObjectModels/RequestModels/VisionImageUrl.cs @@ -0,0 +1,37 @@ +using System.Text.Json.Serialization; + +namespace OpenAI.ObjectModels.RequestModels; + +/// +/// The image_url object of vision message content +/// +public class VisionImageUrl +{ + /// + /// The Url property + /// Images are made available to the model in two main ways: by passing a link to the image or by passing the base64 encoded image directly in the url property. + /// link example: "url" : "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + /// base64 encoded image example: "url" : "data:image/jpeg;base64,{base64_image}" + /// + /// Limitations: + /// OpenAI currently supports PNG (.png), JPEG (.jpeg and .jpg), WEBP (.webp), and non-animated GIF (.gif) image formats + /// Image upload size is limited to 20MB per image + /// Captcha submission is blocked + /// + /// + [JsonPropertyName("url")] + public string Url { get; set; } + + /// + /// The optional Detail property controls low or high fidelity image understanding + /// It has three options, low, high, or auto, you have control over how the model processes the image and generates its textual understanding. + /// By default, the model will use the auto setting which will look at the image input size and decide if it should use the low or high setting. + /// + /// low will disable the “high res” model. The model will receive a low-res 512px x 512px version of the image. 
+ /// high will enable “high res” mode, which first allows the model to see the low res image and then creates detailed crops of input images + /// as 512px squares based on the input image size. + /// + [JsonPropertyName("detail")] + public string? Detail { get; set; } + +} \ No newline at end of file diff --git a/OpenAI.SDK/ObjectModels/ResponseModels/ChatCompletionCreateResponse.cs b/OpenAI.SDK/ObjectModels/ResponseModels/ChatCompletionCreateResponse.cs index 0615826a..096ce70d 100644 --- a/OpenAI.SDK/ObjectModels/ResponseModels/ChatCompletionCreateResponse.cs +++ b/OpenAI.SDK/ObjectModels/ResponseModels/ChatCompletionCreateResponse.cs @@ -14,4 +14,5 @@ public record ChatCompletionCreateResponse : BaseResponse, IOpenAiModels.IId, IO [JsonPropertyName("created")] public int CreatedAt { get; set; } [JsonPropertyName("id")] public string Id { get; set; } + [JsonPropertyName("system_fingerprint")] public string SystemFingerPrint { get; set; } } \ No newline at end of file diff --git a/OpenAI.SDK/ObjectModels/ResponseModels/ImageResponseModel/ImageCreateResponse.cs b/OpenAI.SDK/ObjectModels/ResponseModels/ImageResponseModel/ImageCreateResponse.cs index e3a4db2a..d1f436a8 100644 --- a/OpenAI.SDK/ObjectModels/ResponseModels/ImageResponseModel/ImageCreateResponse.cs +++ b/OpenAI.SDK/ObjectModels/ResponseModels/ImageResponseModel/ImageCreateResponse.cs @@ -13,5 +13,6 @@ public record ImageDataResult { [JsonPropertyName("url")] public string Url { get; set; } [JsonPropertyName("b64_json")] public string B64 { get; set; } + [JsonPropertyName("revised_prompt")] public string RevisedPrompt { get; set; } } } \ No newline at end of file diff --git a/OpenAI.SDK/ObjectModels/SharedModels/ChatChoiceResponse.cs b/OpenAI.SDK/ObjectModels/SharedModels/ChatChoiceResponse.cs index 441d51b5..630f850e 100644 --- a/OpenAI.SDK/ObjectModels/SharedModels/ChatChoiceResponse.cs +++ b/OpenAI.SDK/ObjectModels/SharedModels/ChatChoiceResponse.cs @@ -17,4 +17,13 @@ public ChatMessage Delta [JsonPropertyName("index")] public int? Index { get; set; } [JsonPropertyName("finish_reason")] public string FinishReason { get; set; } + + [JsonPropertyName("finish_details")] public FinishDetailsResponse? 
FinishDetails { get; set; } + public class FinishDetailsResponse + { + [JsonPropertyName("type")] + public string Type { get; set; } + [JsonPropertyName("stop")] + public string Stop { get; set; } + } } \ No newline at end of file diff --git a/OpenAI.SDK/ObjectModels/StaticValueHelper.cs b/OpenAI.SDK/ObjectModels/StaticValueHelper.cs index ae6d99ee..4a4d9b8c 100644 --- a/OpenAI.SDK/ObjectModels/StaticValueHelper.cs +++ b/OpenAI.SDK/ObjectModels/StaticValueHelper.cs @@ -9,6 +9,17 @@ public static class ResponseFormat public static string Json => "json_object"; public static string Text => "text"; } + + public static class ToolType + { + public static string Function => "function"; + } + public static class ToolChoiceType + { + public static string Function => ToolType.Function; + public static string Auto => "auto"; + public static string None => "none"; + } } public static class ImageStatics { @@ -46,6 +57,22 @@ public static class Quality public static string Standard => "standard"; public static string Hd => "hd"; } + + public static class ImageFileTypes + { + public static string Jpeg => "JPEG"; + public static string Png => "PNG"; + public static string Webp => "WEBP"; + public static string Gif => "GIF"; + } + + public static class ImageDetailTypes + { + public static string High => "high"; + public static string Low => "low"; + public static string Auto => "auto"; + + } } public static class AudioStatics @@ -82,6 +109,6 @@ public static class ChatMessageRoles public static string System => "system"; public static string User => "user"; public static string Assistant => "assistant"; - public static string Function => "function"; + public static string Tool => "tool"; } } \ No newline at end of file diff --git a/OpenAI.SDK/OpenAI.csproj b/OpenAI.SDK/OpenAI.csproj index eb99be6f..88c679df 100644 --- a/OpenAI.SDK/OpenAI.csproj +++ b/OpenAI.SDK/OpenAI.csproj @@ -11,7 +11,7 @@ OpenAI-Betalgo.png true OpenAI SDK by Betalgo - 7.4.1 + 7.4.2 Tolga Kayhan, Betalgo Betalgo Up Ltd. OpenAI ChatGPT, Whisper, GPT-4 and DALL·E dotnet SDK diff --git a/Readme.md b/Readme.md index 66f5e82f..9656859c 100644 --- a/Readme.md +++ b/Readme.md @@ -28,7 +28,9 @@ Maintenance of this project is made possible by all the bug reporters, [contribu [@AnukarOP](https://github.com/AnukarOP) [@Removable](https://github.com/Removable) ## Features -- [ ] Dev day Updates (Some updates are currently available, while others will be released soon. Please follow the changelogs for more information.) 
+- [x] Dev day Updates
+- [x] Vision API
+- [x] Tools
 - [X] [Function Calling](https://github.com/betalgo/openai/wiki/Function-Calling)
 - [ ] Plugins (coming soon)
 - [x] [Chat GPT](https://github.com/betalgo/openai/wiki/Chat-GPT)
@@ -214,6 +216,72 @@ if (imageResult.Successful)
 }
 ```

+## Vision Sample
+```csharp
+var completionResult = await sdk.ChatCompletion.CreateCompletion(
+    new ChatCompletionCreateRequest
+    {
+        Messages = new List<ChatMessage>
+        {
+            ChatMessage.FromSystem("You are an image analyzer assistant."),
+            ChatMessage.FromUser(
+                new List<MessageContent>
+                {
+                    MessageContent.TextContent("What is on the picture in details?"),
+                    MessageContent.ImageUrlContent(
+                        "https://www.digitaltrends.com/wp-content/uploads/2016/06/1024px-Bill_Cunningham_at_Fashion_Week_photographed_by_Jiyang_Chen.jpg?p=1",
+                        ImageStatics.ImageDetailTypes.High
+                    )
+                }
+            ),
+        },
+        MaxTokens = 300,
+        Model = Models.Gpt_4_vision_preview,
+        N = 1
+    }
+);
+
+if (completionResult.Successful)
+{
+    Console.WriteLine(completionResult.Choices.First().Message.Content);
+}
+```
+
+## Vision Sample using a Base64 encoded image
+```csharp
+const string fileName = "image.png";
+var binaryImage = await FileExtensions.ReadAllBytesAsync(fileName);
+
+var completionResult = await sdk.ChatCompletion.CreateCompletion(
+    new ChatCompletionCreateRequest
+    {
+        Messages = new List<ChatMessage>
+        {
+            ChatMessage.FromSystem("You are an image analyzer assistant."),
+            ChatMessage.FromUser(
+                new List<MessageContent>
+                {
+                    MessageContent.TextContent("What is on the picture in details?"),
+                    MessageContent.ImageBinaryContent(
+                        binaryImage,
+                        ImageStatics.ImageFileTypes.Png,
+                        ImageStatics.ImageDetailTypes.High
+                    )
+                }
+            ),
+        },
+        MaxTokens = 300,
+        Model = Models.Gpt_4_vision_preview,
+        N = 1
+    }
+);
+
+if (completionResult.Successful)
+{
+    Console.WriteLine(completionResult.Choices.First().Message.Content);
+}
+```
+
 ## Notes:
 #### This library used to be known as `Betalgo.OpenAI.GPT3`, now it has a new package Id `Betalgo.OpenAI`.
@@ -226,6 +294,13 @@ I will always be using the latest libraries, and future releases will frequently
 I am incredibly busy. If I forgot your name, please accept my apologies and let me know so I can add it to the list.

 ## Changelog
+### 7.4.2
+- Let's start with the breaking changes:
+  - OpenAI has replaced function calling with tools. We have made the necessary changes to our code. This is not a major change; you now just have a wrapper around your function calling, which is named "tool". The Playground provides an example, and a short migration sketch is included at the end of this document. Please take a look to see how you can update your code.
+    This update was completed by @shanepowell. Many thanks to him.
+- We now support the Vision API, which works by passing message contents to the existing chat method. It is quite easy to use, although at the time of writing it was not yet covered in the OpenAI API documentation.
+    This feature was completed by @belaszalontai. Many thanks to them.
+
 ### 7.4.1
 - Added support for "Create Speech" thanks to @belaszalontai / @szabe74
 ### 7.4.0
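As referenced in the 7.4.2 notes above, here is a minimal migration sketch for consuming tool calls on the response side, assuming a `completionResult` returned by `sdk.ChatCompletion.CreateCompletion` as in the samples above (the forecast string is illustrative):

```csharp
// Before 7.4.2, the message carried a single function call:
// var fn = completionResult.Choices.First().Message.FunctionCall;

// From 7.4.2, the message carries a list of tool calls, each wrapping a FunctionCall.
var toolCalls = completionResult.Choices.First().Message.ToolCalls;
if (toolCalls != null)
{
    foreach (var toolCall in toolCalls)
    {
        var fn = toolCall.FunctionCall;
        if (fn == null)
        {
            continue;
        }

        Console.WriteLine($"Function call: {fn.Name}");
        foreach (var entry in fn.ParseArguments())
        {
            Console.WriteLine($"  {entry.Key}: {entry.Value}");
        }

        // Send the function result back using the new "tool" role,
        // echoing the tool call id so the model can match it up.
        var toolResultMessage = ChatMessage.FromTool("5 day forecast: sunny", toolCall.Id);
    }
}
```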