diff --git a/OpenAI.Playground/OpenAI.Playground.csproj b/OpenAI.Playground/OpenAI.Playground.csproj
index bb245deb..4886f54d 100644
--- a/OpenAI.Playground/OpenAI.Playground.csproj
+++ b/OpenAI.Playground/OpenAI.Playground.csproj
@@ -59,6 +59,9 @@
       <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    <None Update="SampleData\BatchDataSampleFile.jsonl">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
       <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
diff --git a/OpenAI.Playground/Program.cs b/OpenAI.Playground/Program.cs
index 8af03599..af70b044 100644
--- a/OpenAI.Playground/Program.cs
+++ b/OpenAI.Playground/Program.cs
@@ -49,8 +49,8 @@
// Tools
//await ChatCompletionTestHelper.RunChatFunctionCallTest(sdk);
-await ChatCompletionTestHelper.RunChatFunctionCallTestAsStream(sdk);
-
+//await ChatCompletionTestHelper.RunChatFunctionCallTestAsStream(sdk);
+await BatchTestHelper.RunBatchOperationsTest(sdk);
// Whisper
//await AudioTestHelper.RunSimpleAudioCreateTranscriptionTest(sdk);
//await AudioTestHelper.RunSimpleAudioCreateTranslationTest(sdk);
diff --git a/OpenAI.Playground/SampleData/BatchDataSampleFile.jsonl b/OpenAI.Playground/SampleData/BatchDataSampleFile.jsonl
new file mode 100644
index 00000000..3011decf
--- /dev/null
+++ b/OpenAI.Playground/SampleData/BatchDataSampleFile.jsonl
@@ -0,0 +1 @@
+{"custom_id": "request-1", "method": "POST", "url": "/v1/chat/completions", "body": {"model": "gpt-3.5-turbo", "messages": [{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "What is 2+2?"}]}}
\ No newline at end of file
diff --git a/OpenAI.Playground/TestHelpers/BatchTestHelper.cs b/OpenAI.Playground/TestHelpers/BatchTestHelper.cs
new file mode 100644
index 00000000..767eb939
--- /dev/null
+++ b/OpenAI.Playground/TestHelpers/BatchTestHelper.cs
@@ -0,0 +1,77 @@
+using OpenAI.Interfaces;
+using OpenAI.ObjectModels.RequestModels;
+
+namespace OpenAI.Playground.TestHelpers;
+
+internal static class BatchTestHelper
+{
+ public static async Task RunBatchOperationsTest(IOpenAIService sdk)
+ {
+ ConsoleExtensions.WriteLine("Batch Operations Testing is starting:", ConsoleColor.Cyan);
+
+ try
+ {
+ ConsoleExtensions.WriteLine("Batch Create Test:", ConsoleColor.DarkCyan);
+
+ const string fileName = "BatchDataSampleFile.jsonl";
+ var sampleFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{fileName}");
+ ConsoleExtensions.WriteLine($"Uploading file {fileName}", ConsoleColor.DarkCyan);
+
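+            // The input file must be uploaded with the "batch" purpose; each JSONL line describes one request.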
+ var fileUploadResult = await sdk.Files.UploadFile("batch", sampleFile, fileName);
+
+ if (!fileUploadResult.Successful)
+ {
+ throw new Exception("File upload failed");
+ }
+
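+            // Create the batch: the uploaded file's requests run asynchronously against the chosen endpoint within the completion window.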
+ var batchCreateResult = await sdk.Batch.BatchCreate(new BatchCreateRequest
+ {
+ InputFileId = fileUploadResult.Id,
+ Endpoint = "/v1/chat/completions",
+ CompletionWindow = "24h"
+ });
+
+ if (!batchCreateResult.Successful)
+ {
+ throw new Exception("Batch creation failed");
+ }
+
+ ConsoleExtensions.WriteLine($"Batch ID: {batchCreateResult.Id}", ConsoleColor.Green);
+ ConsoleExtensions.WriteLine($"Batch Status: {batchCreateResult.Status}", ConsoleColor.Green);
+
+ ConsoleExtensions.WriteLine("Batch Retrieve Test:", ConsoleColor.DarkCyan);
+
+ var batchRetrieveResult = await sdk.Batch.BatchRetrieve(batchCreateResult.Id);
+
+ if (!batchRetrieveResult.Successful)
+ {
+ throw new Exception("Batch retrieval failed");
+ }
+
+ ConsoleExtensions.WriteLine($"Batch ID: {batchRetrieveResult.Id}", ConsoleColor.Green);
+ ConsoleExtensions.WriteLine($"Batch Status: {batchRetrieveResult.Status}", ConsoleColor.Green);
+ ConsoleExtensions.WriteLine($"Request Counts:", ConsoleColor.Green);
+ ConsoleExtensions.WriteLine($" Total: {batchRetrieveResult.RequestCounts.Total}", ConsoleColor.Green);
+ ConsoleExtensions.WriteLine($" Completed: {batchRetrieveResult.RequestCounts.Completed}", ConsoleColor.Green);
+ ConsoleExtensions.WriteLine($" Failed: {batchRetrieveResult.RequestCounts.Failed}", ConsoleColor.Green);
+
+ ConsoleExtensions.WriteLine("Batch Cancel Test:", ConsoleColor.DarkCyan);
+
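+            // Cancelling right away exercises the cancel endpoint; the returned batch should report a cancelling/cancelled status.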
+ var batchCancelResult = await sdk.Batch.BatchCancel(batchCreateResult.Id);
+
+ if (!batchCancelResult.Successful)
+ {
+ throw new Exception("Batch cancellation failed");
+ }
+
+ ConsoleExtensions.WriteLine($"Batch ID: {batchCancelResult.Id}", ConsoleColor.Green);
+ ConsoleExtensions.WriteLine($"Batch Status: {batchCancelResult.Status}", ConsoleColor.Green);
+ ConsoleExtensions.WriteLine($"Cancelling At: {batchCancelResult.CancellingAt}", ConsoleColor.Green);
+ }
+ catch (Exception e)
+ {
+ ConsoleExtensions.WriteLine($"Error: {e.Message}", ConsoleColor.Red);
+ throw;
+ }
+ }
+}
\ No newline at end of file
diff --git a/OpenAI.SDK/EndpointProviders/AzureOpenAiEndpointProvider.cs b/OpenAI.SDK/EndpointProviders/AzureOpenAiEndpointProvider.cs
index be65977d..232632cb 100644
--- a/OpenAI.SDK/EndpointProviders/AzureOpenAiEndpointProvider.cs
+++ b/OpenAI.SDK/EndpointProviders/AzureOpenAiEndpointProvider.cs
@@ -188,6 +188,21 @@ public string AudioCreateSpeech()
return $"{Prefix}/audio/speech{QueryString}";
}
+ public string BatchCreate()
+ {
+ return $"{Prefix}/batches{QueryString}";
+ }
+
+ public string BatchRetrieve(string batchId)
+ {
+ return $"{Prefix}/batches/{batchId}{QueryString}";
+ }
+
+ public string BatchCancel(string batchId)
+ {
+ return $"{Prefix}/batches/{batchId}/cancel{QueryString}";
+ }
+
private string Files()
{
return $"{Prefix}/files{QueryString}";
diff --git a/OpenAI.SDK/EndpointProviders/IOpenAiEndpointProvider.cs b/OpenAI.SDK/EndpointProviders/IOpenAiEndpointProvider.cs
index 8429637e..3668721c 100644
--- a/OpenAI.SDK/EndpointProviders/IOpenAiEndpointProvider.cs
+++ b/OpenAI.SDK/EndpointProviders/IOpenAiEndpointProvider.cs
@@ -34,4 +34,7 @@ internal interface IOpenAiEndpointProvider
string AudioCreateTranscription();
string AudioCreateTranslation();
string AudioCreateSpeech();
+ string BatchCreate();
+ string BatchRetrieve(string batchId);
+ string BatchCancel(string batchId);
}
\ No newline at end of file
diff --git a/OpenAI.SDK/EndpointProviders/OpenAiEndpointProvider.cs b/OpenAI.SDK/EndpointProviders/OpenAiEndpointProvider.cs
index 61692008..2d91467b 100644
--- a/OpenAI.SDK/EndpointProviders/OpenAiEndpointProvider.cs
+++ b/OpenAI.SDK/EndpointProviders/OpenAiEndpointProvider.cs
@@ -47,6 +47,21 @@ public string AudioCreateSpeech()
return $"{_apiVersion}/audio/speech";
}
+ public string BatchCreate()
+ {
+ return $"{_apiVersion}/batches";
+ }
+
+ public string BatchRetrieve(string batchId)
+ {
+ return $"{_apiVersion}/batches/{batchId}";
+ }
+
+ public string BatchCancel(string batchId)
+ {
+ return $"{_apiVersion}/batches/{batchId}/cancel";
+ }
+
public string EditCreate()
{
return $"{_apiVersion}/edits";
diff --git a/OpenAI.SDK/Interfaces/IBatchService.cs b/OpenAI.SDK/Interfaces/IBatchService.cs
new file mode 100644
index 00000000..230d1aaf
--- /dev/null
+++ b/OpenAI.SDK/Interfaces/IBatchService.cs
@@ -0,0 +1,32 @@
+using OpenAI.ObjectModels.RequestModels;
+using OpenAI.ObjectModels.ResponseModels.BatchResponseModel;
+
+namespace OpenAI.Interfaces;
+
+public interface IBatchService
+{
+    /// <summary>
+    ///     Creates and executes a batch from an uploaded file of requests.
+    /// </summary>
+    /// <param name="request">The batch creation request: input file ID, endpoint and completion window.</param>
+    /// <param name="cancellationToken">Token to cancel the request.</param>
+    /// <returns>The created Batch object.</returns>
+    Task<BatchResponse> BatchCreate(BatchCreateRequest request, CancellationToken cancellationToken = default);
+
+    /// <summary>
+    ///     Retrieves a batch.
+    /// </summary>
+    /// <param name="batchId">The ID of the batch to retrieve.</param>
+    /// <param name="cancellationToken">Token to cancel the request.</param>
+    /// <returns>The Batch object matching the specified ID.</returns>
+    Task<BatchResponse> BatchRetrieve(string batchId, CancellationToken cancellationToken = default);
+
+    /// <summary>
+    ///     Cancels an in-progress batch.
+    /// </summary>
+    /// <param name="batchId">The ID of the batch to cancel.</param>
+    /// <param name="cancellationToken">Token to cancel the request.</param>
+    /// <returns>The Batch object matching the specified ID.</returns>
+    Task<BatchResponse> BatchCancel(string batchId, CancellationToken cancellationToken = default);
+
+}
\ No newline at end of file
diff --git a/OpenAI.SDK/Interfaces/IOpenAIService.cs b/OpenAI.SDK/Interfaces/IOpenAIService.cs
index 671865d4..114f7bd9 100644
--- a/OpenAI.SDK/Interfaces/IOpenAIService.cs
+++ b/OpenAI.SDK/Interfaces/IOpenAIService.cs
@@ -54,6 +54,10 @@ public interface IOpenAIService
     ///     Given an audio file, the model will return a transcription of the audio.
     /// </summary>
     public IAudioService Audio { get; }
+    /// <summary>
+    ///     Create large batches of API requests to run asynchronously.
+    /// </summary>
+    public IBatchService Batch { get; }
     /// <summary>
diff --git a/OpenAI.SDK/Managers/OpenAIBatchService.cs b/OpenAI.SDK/Managers/OpenAIBatchService.cs
new file mode 100644
index 00000000..b87ac15b
--- /dev/null
+++ b/OpenAI.SDK/Managers/OpenAIBatchService.cs
@@ -0,0 +1,28 @@
+using OpenAI.Extensions;
+using OpenAI.Interfaces;
+using OpenAI.ObjectModels.RequestModels;
+using OpenAI.ObjectModels.ResponseModels.BatchResponseModel;
+using System.Net.Http.Json;
+
+namespace OpenAI.Managers;
+
+public partial class OpenAIService : IBatchService
+{
+    /// <inheritdoc />
+    public async Task<BatchResponse> BatchCreate(BatchCreateRequest request, CancellationToken cancellationToken = default)
+ {
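+        // POST the request to the batches endpoint and deserialize the returned Batch object.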
+        return await _httpClient.PostAndReadAsAsync<BatchResponse>(_endpointProvider.BatchCreate(), request, cancellationToken);
+ }
+
+    /// <inheritdoc />
+    public async Task<BatchResponse> BatchRetrieve(string batchId, CancellationToken cancellationToken = default)
+ {
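+        // GET the batch by ID and deserialize the JSON payload into a BatchResponse.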
+        return await _httpClient.GetFromJsonAsync<BatchResponse>(_endpointProvider.BatchRetrieve(batchId), cancellationToken);
+ }
+
+    /// <inheritdoc />
+    public async Task<BatchResponse> BatchCancel(string batchId, CancellationToken cancellationToken = default)
+ {
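+        // POST with an empty body to the cancel endpoint; the response carries the batch's updated status.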
+        return await _httpClient.PostAndReadAsAsync<BatchResponse>(_endpointProvider.BatchCancel(batchId), null, cancellationToken);
+ }
+}
\ No newline at end of file
diff --git a/OpenAI.SDK/Managers/OpenAIService.cs b/OpenAI.SDK/Managers/OpenAIService.cs
index 7ed568ae..a91d3c40 100644
--- a/OpenAI.SDK/Managers/OpenAIService.cs
+++ b/OpenAI.SDK/Managers/OpenAIService.cs
@@ -101,6 +101,9 @@ public void Dispose()
     /// <inheritdoc />
     public IAudioService Audio => this;
+
+    /// <inheritdoc />
+    public IBatchService Batch => this;
     /// <summary>
     ///     Sets default Model Id
diff --git a/OpenAI.SDK/ObjectModels/RequestModels/BatchCreateRequest.cs b/OpenAI.SDK/ObjectModels/RequestModels/BatchCreateRequest.cs
new file mode 100644
index 00000000..aa59a665
--- /dev/null
+++ b/OpenAI.SDK/ObjectModels/RequestModels/BatchCreateRequest.cs
@@ -0,0 +1,32 @@
+using System.Text.Json.Serialization;
+
+namespace OpenAI.ObjectModels.RequestModels;
+
+public record BatchCreateRequest
+{
+    /// <summary>
+    ///     The ID of an uploaded file that contains requests for the new batch.
+    ///     See [upload file](/docs/api-reference/files/create) for how to upload a file.
+    ///     Your input file must be formatted as a JSONL file, and must be uploaded with the purpose `batch`.
+    /// </summary>
+    [JsonPropertyName("input_file_id")]
+    public string InputFileId { get; set; }
+
+    /// <summary>
+    ///     The endpoint to be used for all requests in the batch. Currently only `/v1/chat/completions` is supported.
+    /// </summary>
+    [JsonPropertyName("endpoint")]
+    public string Endpoint { get; set; }
+
+    /// <summary>
+    ///     The time frame within which the batch should be processed. Currently only `24h` is supported.
+    /// </summary>
+    [JsonPropertyName("completion_window")]
+    public string CompletionWindow { get; set; }
+
+    /// <summary>
+    ///     Optional custom metadata for the batch.
+    /// </summary>
+    [JsonPropertyName("metadata")]
+    public Dictionary<string, string>? MetaData { get; set; }
+}
\ No newline at end of file
diff --git a/OpenAI.SDK/ObjectModels/ResponseModels/BaseResponse.cs b/OpenAI.SDK/ObjectModels/ResponseModels/BaseResponse.cs
index be675868..34bf9b4f 100644
--- a/OpenAI.SDK/ObjectModels/ResponseModels/BaseResponse.cs
+++ b/OpenAI.SDK/ObjectModels/ResponseModels/BaseResponse.cs
@@ -67,6 +67,8 @@ public class Error
[JsonPropertyName("type")] public string? Type { get; set; }
+ [JsonPropertyName("line")]
+ public int? Line { get; set; }
[JsonIgnore] public string? Message { get; private set; }
     [JsonIgnore] public List<string?> Messages { get; private set; }
diff --git a/OpenAI.SDK/ObjectModels/ResponseModels/BatchResponseModel/BatchResponse.cs b/OpenAI.SDK/ObjectModels/ResponseModels/BatchResponseModel/BatchResponse.cs
new file mode 100644
index 00000000..b2260d79
--- /dev/null
+++ b/OpenAI.SDK/ObjectModels/ResponseModels/BatchResponseModel/BatchResponse.cs
@@ -0,0 +1,119 @@
+using System.Text.Json.Serialization;
+
+namespace OpenAI.ObjectModels.ResponseModels.BatchResponseModel;
+
+public record BatchResponse : BaseResponse
+{
+    /// <summary>
+    ///     The ID of the batch.
+    /// </summary>
+    [JsonPropertyName("id")]
+    public string Id { get; set; }
+
+    /// <summary>
+    ///     The OpenAI API endpoint used by the batch.
+    /// </summary>
+    [JsonPropertyName("endpoint")]
+    public string Endpoint { get; set; }
+
+    [JsonPropertyName("errors")]
+    public List<Error>? Errors { get; set; }
+
+    /// <summary>
+    ///     The ID of the input file for the batch.
+    /// </summary>
+    [JsonPropertyName("input_file_id")]
+    public string InputFileId { get; set; }
+
+    /// <summary>
+    ///     The time frame within which the batch should be processed.
+    /// </summary>
+    [JsonPropertyName("completion_window")]
+    public string CompletionWindow { get; set; }
+
+    /// <summary>
+    ///     The current status of the batch.
+    /// </summary>
+    [JsonPropertyName("status")]
+    public string Status { get; set; }
+
+    /// <summary>
+    ///     The ID of the file containing the outputs of successfully executed requests.
+    /// </summary>
+    [JsonPropertyName("output_file_id")]
+    public string? OutputFileId { get; set; }
+
+    /// <summary>
+    ///     The ID of the file containing the outputs of requests with errors.
+    /// </summary>
+    [JsonPropertyName("error_file_id")]
+    public string? ErrorFileId { get; set; }
+
+    /// <summary>
+    ///     The Unix timestamp (in seconds) for when the batch was created.
+    /// </summary>
+    [JsonPropertyName("created_at")]
+    public int CreatedAt { get; set; }
+
+    /// <summary>
+    ///     The Unix timestamp (in seconds) for when the batch started processing.
+    /// </summary>
+    [JsonPropertyName("in_progress_at")]
+    public int? InProgressAt { get; set; }
+
+    /// <summary>
+    ///     The Unix timestamp (in seconds) for when the batch will expire.
+    /// </summary>
+    [JsonPropertyName("expires_at")]
+    public int? ExpiresAt { get; set; }
+
+    /// <summary>
+    ///     The Unix timestamp (in seconds) for when the batch started finalizing.
+    /// </summary>
+    [JsonPropertyName("finalizing_at")]
+    public int? FinalizingAt { get; set; }
+
+    /// <summary>
+    ///     The Unix timestamp (in seconds) for when the batch was completed.
+    /// </summary>
+    [JsonPropertyName("completed_at")]
+    public int? CompletedAt { get; set; }
+
+    /// <summary>
+    ///     The Unix timestamp (in seconds) for when the batch failed.
+    /// </summary>
+    [JsonPropertyName("failed_at")]
+    public int? FailedAt { get; set; }
+
+    /// <summary>
+    ///     The Unix timestamp (in seconds) for when the batch expired.
+    /// </summary>
+    [JsonPropertyName("expired_at")]
+    public int? ExpiredAt { get; set; }
+
+    /// <summary>
+    ///     The Unix timestamp (in seconds) for when the batch started cancelling.
+    /// </summary>
+    [JsonPropertyName("cancelling_at")]
+    public int? CancellingAt { get; set; }
+
+    /// <summary>
+    ///     The Unix timestamp (in seconds) for when the batch was cancelled.
+    /// </summary>
+    [JsonPropertyName("cancelled_at")]
+    public int? CancelledAt { get; set; }
+
+    /// <summary>
+    ///     The request counts for different statuses within the batch.
+    /// </summary>
+    [JsonPropertyName("request_counts")]
+    public RequestCountsResponse RequestCounts { get; set; }
+
+    /// <summary>
+    ///     Set of 16 key-value pairs that can be attached to an object.
+    ///     This can be useful for storing additional information about the object in a structured format.
+    ///     Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
+    /// </summary>
+    [JsonPropertyName("metadata")]
+    public Dictionary<string, string>? MetaData { get; set; }
+}
\ No newline at end of file
diff --git a/OpenAI.SDK/ObjectModels/ResponseModels/BatchResponseModel/RequestCountsResponse.cs b/OpenAI.SDK/ObjectModels/ResponseModels/BatchResponseModel/RequestCountsResponse.cs
new file mode 100644
index 00000000..c056f971
--- /dev/null
+++ b/OpenAI.SDK/ObjectModels/ResponseModels/BatchResponseModel/RequestCountsResponse.cs
@@ -0,0 +1,24 @@
+using System.Text.Json.Serialization;
+
+namespace OpenAI.ObjectModels.ResponseModels.BatchResponseModel;
+
+public record RequestCountsResponse
+{
+ ///
+ /// Total number of requests in the batch.
+ ///
+ [JsonPropertyName("total")]
+ public int Total { get; set; }
+
+ ///
+ /// Number of requests that have been completed successfully.
+ ///
+ [JsonPropertyName("completed")]
+ public int Completed { get; set; }
+
+ ///
+ /// Number of requests that have failed.
+ ///
+ [JsonPropertyName("failed")]
+ public int Failed { get; set; }
+}
\ No newline at end of file
diff --git a/OpenAI.SDK/OpenAI.csproj b/OpenAI.SDK/OpenAI.csproj
index 7bab23c3..fc977dfd 100644
--- a/OpenAI.SDK/OpenAI.csproj
+++ b/OpenAI.SDK/OpenAI.csproj
@@ -10,7 +10,7 @@
OpenAI-Betalgo.png
true
OpenAI SDK by Betalgo
-    <Version>8.0.1</Version>
+    <Version>8.1.0</Version>
Tolga Kayhan, Betalgo
Betalgo Up Ltd.
OpenAI ChatGPT, Whisper, GPT-4 and DALL·E dotnet SDK
diff --git a/Readme.md b/Readme.md
index 45326e8c..40a5c40a 100644
--- a/Readme.md
+++ b/Readme.md
@@ -304,6 +304,8 @@ I will always be using the latest libraries, and future releases will frequently
I am incredibly busy. If I forgot your name, please accept my apologies and let me know so I can add it to the list.
## Changelog
+### 8.1.0
+- Added support for Batch API
### 8.0.1
- Added support for new Models `gpt-4-turbo` and `gpt-4-turbo-2024-04-09` thanks to @ChaseIngersol
### 8.0.0